From 2e2f6364dc10807bfccdd1a07d5d8bf827a1c7c5 Mon Sep 17 00:00:00 2001 From: Mark Bates Date: Mon, 21 Oct 2019 16:23:18 -0400 Subject: [PATCH] one and a half stars --- Makefile | 6 +- here/info.go | 57 - here/internal/pathparser/constructors.go | 65 + here/internal/pathparser/parser.go | 1620 ++++++++++++++++++++++ here/internal/pathparser/parser.peg | 37 + here/internal/pathparser/parser_test.go | 89 ++ here/internal/pathparser/path.go | 15 + here/parse.go | 44 + pkger_test.go | 51 +- pkging/pkgtest/file.go | 11 +- pkging/pkgtest/http.go | 1 - pkging/pkgtest/suite.go | 76 +- pkging/pkgtest/util.go | 9 +- 13 files changed, 1937 insertions(+), 144 deletions(-) create mode 100644 here/internal/pathparser/constructors.go create mode 100644 here/internal/pathparser/parser.go create mode 100644 here/internal/pathparser/parser.peg create mode 100644 here/internal/pathparser/parser_test.go create mode 100644 here/internal/pathparser/path.go create mode 100644 here/parse.go diff --git a/Makefile b/Makefile index 304b497..25b4b6b 100644 --- a/Makefile +++ b/Makefile @@ -16,7 +16,7 @@ build: tidy $(GO_BIN) build -v . make tidy -test: tidy +test: tidy peg $(GO_BIN) test -cover -tags ${TAGS} -timeout 5s ./... 
make tidy @@ -51,4 +51,6 @@ release: release -y -f version.go --skip-packr make tidy - +peg: + pigeon here/internal/pathparser/parser.peg | goimports > here/internal/pathparser/parser.go + go test ./here/internal/pathparser diff --git a/here/info.go b/here/info.go index ab56e4d..37a301b 100644 --- a/here/info.go +++ b/here/info.go @@ -2,10 +2,8 @@ package here import ( "encoding/json" - "fmt" "os" "path/filepath" - "regexp" "runtime" "strings" @@ -96,58 +94,3 @@ func (i Info) String() string { s := string(b) return s } - -func (i Info) Parse(p string) (Path, error) { - p = strings.TrimSpace(p) - p = filepath.Clean(p) - p = strings.TrimPrefix(p, i.Dir) - - p = strings.Replace(p, "\\", "/", -1) - p = strings.TrimSpace(p) - - if len(p) == 0 || p == ":" { - return i.build("", "", "") - } - - res := pathrx.FindAllStringSubmatch(p, -1) - if len(res) == 0 { - return Path{}, fmt.Errorf("could not parse %q", p) - } - - matches := res[0] - - if len(matches) != 4 { - return Path{}, fmt.Errorf("could not parse %q", p) - } - - return i.build(p, matches[1], matches[3]) -} - -func (i Info) build(p, pkg, name string) (Path, error) { - pt := Path{ - Pkg: pkg, - Name: name, - } - - if strings.HasPrefix(pt.Pkg, "/") || len(pt.Pkg) == 0 { - pt.Name = pt.Pkg - pt.Pkg = i.ImportPath - } - - if len(pt.Name) == 0 { - pt.Name = "/" - } - - if pt.Pkg == pt.Name { - pt.Pkg = i.ImportPath - pt.Name = "/" - } - - if !strings.HasPrefix(pt.Name, "/") { - pt.Name = "/" + pt.Name - } - pt.Name = strings.TrimPrefix(pt.Name, i.Dir) - return pt, nil -} - -var pathrx = regexp.MustCompile("([^:]+)(:(/.+))?") diff --git a/here/internal/pathparser/constructors.go b/here/internal/pathparser/constructors.go new file mode 100644 index 0000000..5398798 --- /dev/null +++ b/here/internal/pathparser/constructors.go @@ -0,0 +1,65 @@ +package pathparser + +import ( + "fmt" + "strings" +) + +func toString(i interface{}) (string, error) { + if i == nil { + return "", nil + } + if s, ok := i.(string); ok { + return 
s, nil + } + return "", fmt.Errorf("%T is not a string", i) +} + +func toName(i interface{}) (string, error) { + s, err := toString(i) + if err != nil { + return "", err + } + if !strings.HasPrefix(s, "/") { + s = "/" + s + } + return s, nil +} + +func toPath(pkg, name interface{}) (*Path, error) { + n, err := toString(name) + if err != nil { + return nil, err + } + + pg, _ := pkg.(*Package) + p := &Path{ + Name: n, + Pkg: pg, + } + // if p.IsZero() { + // return nil, fmt.Errorf("empty path") + // } + + if p.Name == "" { + p.Name = "/" + } + return p, nil +} + +func toPackage(n, v interface{}) (*Package, error) { + name, err := toString(n) + if err != nil { + return nil, err + } + + var version string + if s, ok := v.(string); ok { + version = s + } + + return &Package{ + Name: name, + Version: version, + }, nil +} diff --git a/here/internal/pathparser/parser.go b/here/internal/pathparser/parser.go new file mode 100644 index 0000000..20ec9a4 --- /dev/null +++ b/here/internal/pathparser/parser.go @@ -0,0 +1,1620 @@ +// Code generated by pigeon; DO NOT EDIT. 
+ +package pathparser + +import ( + "bytes" + "errors" + "fmt" + "io" + "io/ioutil" + "math" + "os" + "sort" + "strconv" + "strings" + "unicode" + "unicode/utf8" +) + +var g = &grammar{ + rules: []*rule{ + { + name: "DOC", + pos: position{line: 5, col: 1, offset: 26}, + expr: &actionExpr{ + pos: position{line: 5, col: 8, offset: 33}, + run: (*parser).callonDOC1, + expr: &seqExpr{ + pos: position{line: 5, col: 8, offset: 33}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 5, col: 8, offset: 33}, + label: "pkg", + expr: &zeroOrOneExpr{ + pos: position{line: 5, col: 12, offset: 37}, + expr: &ruleRefExpr{ + pos: position{line: 5, col: 12, offset: 37}, + name: "PKG", + }, + }, + }, + &labeledExpr{ + pos: position{line: 5, col: 17, offset: 42}, + label: "n", + expr: &zeroOrOneExpr{ + pos: position{line: 5, col: 19, offset: 44}, + expr: &ruleRefExpr{ + pos: position{line: 5, col: 19, offset: 44}, + name: "NAME", + }, + }, + }, + &choiceExpr{ + pos: position{line: 5, col: 26, offset: 51}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 5, col: 26, offset: 51}, + name: "nl", + }, + &ruleRefExpr{ + pos: position{line: 5, col: 31, offset: 56}, + name: "EOF", + }, + }, + }, + }, + }, + }, + }, + { + name: "PKG", + pos: position{line: 9, col: 1, offset: 92}, + expr: &actionExpr{ + pos: position{line: 9, col: 8, offset: 99}, + run: (*parser).callonPKG1, + expr: &seqExpr{ + pos: position{line: 9, col: 8, offset: 99}, + exprs: []interface{}{ + ¬Expr{ + pos: position{line: 9, col: 8, offset: 99}, + expr: &litMatcher{ + pos: position{line: 9, col: 9, offset: 100}, + val: "/", + ignoreCase: false, + }, + }, + &labeledExpr{ + pos: position{line: 9, col: 13, offset: 104}, + label: "n", + expr: &ruleRefExpr{ + pos: position{line: 9, col: 15, offset: 106}, + name: "STRING", + }, + }, + &labeledExpr{ + pos: position{line: 9, col: 22, offset: 113}, + label: "v", + expr: &zeroOrOneExpr{ + pos: position{line: 9, col: 24, offset: 115}, + expr: &ruleRefExpr{ 
+ pos: position{line: 9, col: 25, offset: 116}, + name: "VERSION", + }, + }, + }, + &zeroOrOneExpr{ + pos: position{line: 9, col: 35, offset: 126}, + expr: &litMatcher{ + pos: position{line: 9, col: 35, offset: 126}, + val: ":", + ignoreCase: false, + }, + }, + }, + }, + }, + }, + { + name: "NAME", + pos: position{line: 13, col: 1, offset: 163}, + expr: &actionExpr{ + pos: position{line: 13, col: 9, offset: 171}, + run: (*parser).callonNAME1, + expr: &seqExpr{ + pos: position{line: 13, col: 9, offset: 171}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 13, col: 9, offset: 171}, + val: "/", + ignoreCase: false, + }, + &labeledExpr{ + pos: position{line: 13, col: 13, offset: 175}, + label: "n", + expr: &ruleRefExpr{ + pos: position{line: 13, col: 15, offset: 177}, + name: "STRING", + }, + }, + }, + }, + }, + }, + { + name: "VERSION", + pos: position{line: 17, col: 1, offset: 208}, + expr: &actionExpr{ + pos: position{line: 17, col: 12, offset: 219}, + run: (*parser).callonVERSION1, + expr: &seqExpr{ + pos: position{line: 17, col: 12, offset: 219}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 17, col: 12, offset: 219}, + val: "@", + ignoreCase: false, + }, + &seqExpr{ + pos: position{line: 17, col: 18, offset: 225}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 17, col: 18, offset: 225}, + val: "v", + ignoreCase: false, + }, + &ruleRefExpr{ + pos: position{line: 17, col: 22, offset: 229}, + name: "NUMBER", + }, + &litMatcher{ + pos: position{line: 17, col: 29, offset: 236}, + val: ".", + ignoreCase: false, + }, + &ruleRefExpr{ + pos: position{line: 17, col: 33, offset: 240}, + name: "NUMBER", + }, + &litMatcher{ + pos: position{line: 17, col: 40, offset: 247}, + val: ".", + ignoreCase: false, + }, + &ruleRefExpr{ + pos: position{line: 17, col: 44, offset: 251}, + name: "NUMBER", + }, + }, + }, + }, + }, + }, + }, + { + name: "STRING", + pos: position{line: 21, col: 1, offset: 319}, + expr: &actionExpr{ + pos: 
position{line: 21, col: 11, offset: 329}, + run: (*parser).callonSTRING1, + expr: &oneOrMoreExpr{ + pos: position{line: 21, col: 12, offset: 330}, + expr: &charClassMatcher{ + pos: position{line: 21, col: 12, offset: 330}, + val: "[^@:\\\\\\n]", + chars: []rune{'@', ':', '\\', '\n'}, + ignoreCase: false, + inverted: true, + }, + }, + }, + }, + { + name: "NUMBER", + pos: position{line: 25, col: 1, offset: 397}, + expr: &actionExpr{ + pos: position{line: 25, col: 11, offset: 407}, + run: (*parser).callonNUMBER1, + expr: &oneOrMoreExpr{ + pos: position{line: 25, col: 11, offset: 407}, + expr: &charClassMatcher{ + pos: position{line: 25, col: 11, offset: 407}, + val: "[0-9]", + ranges: []rune{'0', '9'}, + ignoreCase: false, + inverted: false, + }, + }, + }, + }, + { + name: "_", + displayName: "\"whitespace\"", + pos: position{line: 29, col: 1, offset: 477}, + expr: &zeroOrMoreExpr{ + pos: position{line: 29, col: 18, offset: 496}, + expr: &charClassMatcher{ + pos: position{line: 29, col: 18, offset: 496}, + val: "[ \\t]", + chars: []rune{' ', '\t'}, + ignoreCase: false, + inverted: false, + }, + }, + }, + { + name: "nl", + displayName: "\"newline\"", + pos: position{line: 31, col: 1, offset: 504}, + expr: &zeroOrMoreExpr{ + pos: position{line: 31, col: 16, offset: 521}, + expr: &charClassMatcher{ + pos: position{line: 31, col: 16, offset: 521}, + val: "[\\n\\r]", + chars: []rune{'\n', '\r'}, + ignoreCase: false, + inverted: false, + }, + }, + }, + { + name: "EOF", + pos: position{line: 33, col: 1, offset: 530}, + expr: &actionExpr{ + pos: position{line: 33, col: 8, offset: 537}, + run: (*parser).callonEOF1, + expr: ¬Expr{ + pos: position{line: 33, col: 8, offset: 537}, + expr: &anyMatcher{ + line: 33, col: 9, offset: 538, + }, + }, + }, + }, + }, +} + +func (c *current) onDOC1(pkg, n interface{}) (interface{}, error) { + return toPath(pkg, n) +} + +func (p *parser) callonDOC1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return 
p.cur.onDOC1(stack["pkg"], stack["n"]) +} + +func (c *current) onPKG1(n, v interface{}) (interface{}, error) { + return toPackage(n, v) +} + +func (p *parser) callonPKG1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onPKG1(stack["n"], stack["v"]) +} + +func (c *current) onNAME1(n interface{}) (interface{}, error) { + return toName(n) +} + +func (p *parser) callonNAME1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNAME1(stack["n"]) +} + +func (c *current) onVERSION1() (interface{}, error) { + return strings.TrimPrefix(string(c.text), "@"), nil +} + +func (p *parser) callonVERSION1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onVERSION1() +} + +func (c *current) onSTRING1() (interface{}, error) { + return strings.TrimSpace(string(c.text)), nil +} + +func (p *parser) callonSTRING1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onSTRING1() +} + +func (c *current) onNUMBER1() (interface{}, error) { + return strconv.Atoi(strings.TrimSpace(string(c.text))) +} + +func (p *parser) callonNUMBER1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNUMBER1() +} + +func (c *current) onEOF1() (interface{}, error) { + return nil, nil +} + +func (p *parser) callonEOF1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onEOF1() +} + +var ( + // errNoRule is returned when the grammar to parse has no rule. + errNoRule = errors.New("grammar has no rule") + + // errInvalidEntrypoint is returned when the specified entrypoint rule + // does not exit. + errInvalidEntrypoint = errors.New("invalid entrypoint") + + // errInvalidEncoding is returned when the source is not properly + // utf8-encoded. 
+ errInvalidEncoding = errors.New("invalid encoding") + + // errMaxExprCnt is used to signal that the maximum number of + // expressions have been parsed. + errMaxExprCnt = errors.New("max number of expresssions parsed") +) + +// Option is a function that can set an option on the parser. It returns +// the previous setting as an Option. +type Option func(*parser) Option + +// MaxExpressions creates an Option to stop parsing after the provided +// number of expressions have been parsed, if the value is 0 then the parser will +// parse for as many steps as needed (possibly an infinite number). +// +// The default for maxExprCnt is 0. +func MaxExpressions(maxExprCnt uint64) Option { + return func(p *parser) Option { + oldMaxExprCnt := p.maxExprCnt + p.maxExprCnt = maxExprCnt + return MaxExpressions(oldMaxExprCnt) + } +} + +// Entrypoint creates an Option to set the rule name to use as entrypoint. +// The rule name must have been specified in the -alternate-entrypoints +// if generating the parser with the -optimize-grammar flag, otherwise +// it may have been optimized out. Passing an empty string sets the +// entrypoint to the first rule in the grammar. +// +// The default is to start parsing at the first rule in the grammar. +func Entrypoint(ruleName string) Option { + return func(p *parser) Option { + oldEntrypoint := p.entrypoint + p.entrypoint = ruleName + if ruleName == "" { + p.entrypoint = g.rules[0].name + } + return Entrypoint(oldEntrypoint) + } +} + +// Statistics adds a user provided Stats struct to the parser to allow +// the user to process the results after the parsing has finished. +// Also the key for the "no match" counter is set. 
+// +// Example usage: +// +// input := "input" +// stats := Stats{} +// _, err := Parse("input-file", []byte(input), Statistics(&stats, "no match")) +// if err != nil { +// log.Panicln(err) +// } +// b, err := json.MarshalIndent(stats.ChoiceAltCnt, "", " ") +// if err != nil { +// log.Panicln(err) +// } +// fmt.Println(string(b)) +// +func Statistics(stats *Stats, choiceNoMatch string) Option { + return func(p *parser) Option { + oldStats := p.Stats + p.Stats = stats + oldChoiceNoMatch := p.choiceNoMatch + p.choiceNoMatch = choiceNoMatch + if p.Stats.ChoiceAltCnt == nil { + p.Stats.ChoiceAltCnt = make(map[string]map[string]int) + } + return Statistics(oldStats, oldChoiceNoMatch) + } +} + +// Debug creates an Option to set the debug flag to b. When set to true, +// debugging information is printed to stdout while parsing. +// +// The default is false. +func Debug(b bool) Option { + return func(p *parser) Option { + old := p.debug + p.debug = b + return Debug(old) + } +} + +// Memoize creates an Option to set the memoize flag to b. When set to true, +// the parser will cache all results so each expression is evaluated only +// once. This guarantees linear parsing time even for pathological cases, +// at the expense of more memory and slower times for typical cases. +// +// The default is false. +func Memoize(b bool) Option { + return func(p *parser) Option { + old := p.memoize + p.memoize = b + return Memoize(old) + } +} + +// AllowInvalidUTF8 creates an Option to allow invalid UTF-8 bytes. +// Every invalid UTF-8 byte is treated as a utf8.RuneError (U+FFFD) +// by character class matchers and is matched by the any matcher. +// The returned matched value, c.text and c.offset are NOT affected. +// +// The default is false. +func AllowInvalidUTF8(b bool) Option { + return func(p *parser) Option { + old := p.allowInvalidUTF8 + p.allowInvalidUTF8 = b + return AllowInvalidUTF8(old) + } +} + +// Recover creates an Option to set the recover flag to b. 
When set to +// true, this causes the parser to recover from panics and convert it +// to an error. Setting it to false can be useful while debugging to +// access the full stack trace. +// +// The default is true. +func Recover(b bool) Option { + return func(p *parser) Option { + old := p.recover + p.recover = b + return Recover(old) + } +} + +// GlobalStore creates an Option to set a key to a certain value in +// the globalStore. +func GlobalStore(key string, value interface{}) Option { + return func(p *parser) Option { + old := p.cur.globalStore[key] + p.cur.globalStore[key] = value + return GlobalStore(key, old) + } +} + +// InitState creates an Option to set a key to a certain value in +// the global "state" store. +func InitState(key string, value interface{}) Option { + return func(p *parser) Option { + old := p.cur.state[key] + p.cur.state[key] = value + return InitState(key, old) + } +} + +// ParseFile parses the file identified by filename. +func ParseFile(filename string, opts ...Option) (i interface{}, err error) { + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer func() { + if closeErr := f.Close(); closeErr != nil { + err = closeErr + } + }() + return ParseReader(filename, f, opts...) +} + +// ParseReader parses the data from r using filename as information in the +// error messages. +func ParseReader(filename string, r io.Reader, opts ...Option) (interface{}, error) { + b, err := ioutil.ReadAll(r) + if err != nil { + return nil, err + } + + return Parse(filename, b, opts...) +} + +// Parse parses the data from b using filename as information in the +// error messages. +func Parse(filename string, b []byte, opts ...Option) (interface{}, error) { + return newParser(filename, b, opts...).parse(g) +} + +// position records a position in the text. 
+type position struct { + line, col, offset int +} + +func (p position) String() string { + return fmt.Sprintf("%d:%d [%d]", p.line, p.col, p.offset) +} + +// savepoint stores all state required to go back to this point in the +// parser. +type savepoint struct { + position + rn rune + w int +} + +type current struct { + pos position // start position of the match + text []byte // raw text of the match + + // state is a store for arbitrary key,value pairs that the user wants to be + // tied to the backtracking of the parser. + // This is always rolled back if a parsing rule fails. + state storeDict + + // globalStore is a general store for the user to store arbitrary key-value + // pairs that they need to manage and that they do not want tied to the + // backtracking of the parser. This is only modified by the user and never + // rolled back by the parser. It is always up to the user to keep this in a + // consistent state. + globalStore storeDict +} + +type storeDict map[string]interface{} + +// the AST types... 
+ +type grammar struct { + pos position + rules []*rule +} + +type rule struct { + pos position + name string + displayName string + expr interface{} +} + +type choiceExpr struct { + pos position + alternatives []interface{} +} + +type actionExpr struct { + pos position + expr interface{} + run func(*parser) (interface{}, error) +} + +type recoveryExpr struct { + pos position + expr interface{} + recoverExpr interface{} + failureLabel []string +} + +type seqExpr struct { + pos position + exprs []interface{} +} + +type throwExpr struct { + pos position + label string +} + +type labeledExpr struct { + pos position + label string + expr interface{} +} + +type expr struct { + pos position + expr interface{} +} + +type andExpr expr +type notExpr expr +type zeroOrOneExpr expr +type zeroOrMoreExpr expr +type oneOrMoreExpr expr + +type ruleRefExpr struct { + pos position + name string +} + +type stateCodeExpr struct { + pos position + run func(*parser) error +} + +type andCodeExpr struct { + pos position + run func(*parser) (bool, error) +} + +type notCodeExpr struct { + pos position + run func(*parser) (bool, error) +} + +type litMatcher struct { + pos position + val string + ignoreCase bool +} + +type charClassMatcher struct { + pos position + val string + basicLatinChars [128]bool + chars []rune + ranges []rune + classes []*unicode.RangeTable + ignoreCase bool + inverted bool +} + +type anyMatcher position + +// errList cumulates the errors found by the parser. 
+type errList []error + +func (e *errList) add(err error) { + *e = append(*e, err) +} + +func (e errList) err() error { + if len(e) == 0 { + return nil + } + e.dedupe() + return e +} + +func (e *errList) dedupe() { + var cleaned []error + set := make(map[string]bool) + for _, err := range *e { + if msg := err.Error(); !set[msg] { + set[msg] = true + cleaned = append(cleaned, err) + } + } + *e = cleaned +} + +func (e errList) Error() string { + switch len(e) { + case 0: + return "" + case 1: + return e[0].Error() + default: + var buf bytes.Buffer + + for i, err := range e { + if i > 0 { + buf.WriteRune('\n') + } + buf.WriteString(err.Error()) + } + return buf.String() + } +} + +// parserError wraps an error with a prefix indicating the rule in which +// the error occurred. The original error is stored in the Inner field. +type parserError struct { + Inner error + pos position + prefix string + expected []string +} + +// Error returns the error message. +func (p *parserError) Error() string { + return p.prefix + ": " + p.Inner.Error() +} + +// newParser creates a parser with the specified input source and options. +func newParser(filename string, b []byte, opts ...Option) *parser { + stats := Stats{ + ChoiceAltCnt: make(map[string]map[string]int), + } + + p := &parser{ + filename: filename, + errs: new(errList), + data: b, + pt: savepoint{position: position{line: 1}}, + recover: true, + cur: current{ + state: make(storeDict), + globalStore: make(storeDict), + }, + maxFailPos: position{col: 1, line: 1}, + maxFailExpected: make([]string, 0, 20), + Stats: &stats, + // start rule is rule [0] unless an alternate entrypoint is specified + entrypoint: g.rules[0].name, + } + p.setOptions(opts) + + if p.maxExprCnt == 0 { + p.maxExprCnt = math.MaxUint64 + } + + return p +} + +// setOptions applies the options to the parser. 
+func (p *parser) setOptions(opts []Option) { + for _, opt := range opts { + opt(p) + } +} + +type resultTuple struct { + v interface{} + b bool + end savepoint +} + +const choiceNoMatch = -1 + +// Stats stores some statistics, gathered during parsing +type Stats struct { + // ExprCnt counts the number of expressions processed during parsing + // This value is compared to the maximum number of expressions allowed + // (set by the MaxExpressions option). + ExprCnt uint64 + + // ChoiceAltCnt is used to count for each ordered choice expression, + // which alternative is used how may times. + // These numbers allow to optimize the order of the ordered choice expression + // to increase the performance of the parser + // + // The outer key of ChoiceAltCnt is composed of the name of the rule as well + // as the line and the column of the ordered choice. + // The inner key of ChoiceAltCnt is the number (one-based) of the matching alternative. + // For each alternative the number of matches are counted. If an ordered choice does not + // match, a special counter is incremented. The name of this counter is set with + // the parser option Statistics. + // For an alternative to be included in ChoiceAltCnt, it has to match at least once. 
+ ChoiceAltCnt map[string]map[string]int +} + +type parser struct { + filename string + pt savepoint + cur current + + data []byte + errs *errList + + depth int + recover bool + debug bool + + memoize bool + // memoization table for the packrat algorithm: + // map[offset in source] map[expression or rule] {value, match} + memo map[int]map[interface{}]resultTuple + + // rules table, maps the rule identifier to the rule node + rules map[string]*rule + // variables stack, map of label to value + vstack []map[string]interface{} + // rule stack, allows identification of the current rule in errors + rstack []*rule + + // parse fail + maxFailPos position + maxFailExpected []string + maxFailInvertExpected bool + + // max number of expressions to be parsed + maxExprCnt uint64 + // entrypoint for the parser + entrypoint string + + allowInvalidUTF8 bool + + *Stats + + choiceNoMatch string + // recovery expression stack, keeps track of the currently available recovery expression, these are traversed in reverse + recoveryStack []map[string]interface{} +} + +// push a variable set on the vstack. +func (p *parser) pushV() { + if cap(p.vstack) == len(p.vstack) { + // create new empty slot in the stack + p.vstack = append(p.vstack, nil) + } else { + // slice to 1 more + p.vstack = p.vstack[:len(p.vstack)+1] + } + + // get the last args set + m := p.vstack[len(p.vstack)-1] + if m != nil && len(m) == 0 { + // empty map, all good + return + } + + m = make(map[string]interface{}) + p.vstack[len(p.vstack)-1] = m +} + +// pop a variable set from the vstack. 
+func (p *parser) popV() { + // if the map is not empty, clear it + m := p.vstack[len(p.vstack)-1] + if len(m) > 0 { + // GC that map + p.vstack[len(p.vstack)-1] = nil + } + p.vstack = p.vstack[:len(p.vstack)-1] +} + +// push a recovery expression with its labels to the recoveryStack +func (p *parser) pushRecovery(labels []string, expr interface{}) { + if cap(p.recoveryStack) == len(p.recoveryStack) { + // create new empty slot in the stack + p.recoveryStack = append(p.recoveryStack, nil) + } else { + // slice to 1 more + p.recoveryStack = p.recoveryStack[:len(p.recoveryStack)+1] + } + + m := make(map[string]interface{}, len(labels)) + for _, fl := range labels { + m[fl] = expr + } + p.recoveryStack[len(p.recoveryStack)-1] = m +} + +// pop a recovery expression from the recoveryStack +func (p *parser) popRecovery() { + // GC that map + p.recoveryStack[len(p.recoveryStack)-1] = nil + + p.recoveryStack = p.recoveryStack[:len(p.recoveryStack)-1] +} + +func (p *parser) print(prefix, s string) string { + if !p.debug { + return s + } + + fmt.Printf("%s %d:%d:%d: %s [%#U]\n", + prefix, p.pt.line, p.pt.col, p.pt.offset, s, p.pt.rn) + return s +} + +func (p *parser) in(s string) string { + p.depth++ + return p.print(strings.Repeat(" ", p.depth)+">", s) +} + +func (p *parser) out(s string) string { + p.depth-- + return p.print(strings.Repeat(" ", p.depth)+"<", s) +} + +func (p *parser) addErr(err error) { + p.addErrAt(err, p.pt.position, []string{}) +} + +func (p *parser) addErrAt(err error, pos position, expected []string) { + var buf bytes.Buffer + if p.filename != "" { + buf.WriteString(p.filename) + } + if buf.Len() > 0 { + buf.WriteString(":") + } + buf.WriteString(fmt.Sprintf("%d:%d (%d)", pos.line, pos.col, pos.offset)) + if len(p.rstack) > 0 { + if buf.Len() > 0 { + buf.WriteString(": ") + } + rule := p.rstack[len(p.rstack)-1] + if rule.displayName != "" { + buf.WriteString("rule " + rule.displayName) + } else { + buf.WriteString("rule " + rule.name) + } + } + pe := 
&parserError{Inner: err, pos: pos, prefix: buf.String(), expected: expected} + p.errs.add(pe) +} + +func (p *parser) failAt(fail bool, pos position, want string) { + // process fail if parsing fails and not inverted or parsing succeeds and invert is set + if fail == p.maxFailInvertExpected { + if pos.offset < p.maxFailPos.offset { + return + } + + if pos.offset > p.maxFailPos.offset { + p.maxFailPos = pos + p.maxFailExpected = p.maxFailExpected[:0] + } + + if p.maxFailInvertExpected { + want = "!" + want + } + p.maxFailExpected = append(p.maxFailExpected, want) + } +} + +// read advances the parser to the next rune. +func (p *parser) read() { + p.pt.offset += p.pt.w + rn, n := utf8.DecodeRune(p.data[p.pt.offset:]) + p.pt.rn = rn + p.pt.w = n + p.pt.col++ + if rn == '\n' { + p.pt.line++ + p.pt.col = 0 + } + + if rn == utf8.RuneError && n == 1 { // see utf8.DecodeRune + if !p.allowInvalidUTF8 { + p.addErr(errInvalidEncoding) + } + } +} + +// restore parser position to the savepoint pt. +func (p *parser) restore(pt savepoint) { + if p.debug { + defer p.out(p.in("restore")) + } + if pt.offset == p.pt.offset { + return + } + p.pt = pt +} + +// Cloner is implemented by any value that has a Clone method, which returns a +// copy of the value. This is mainly used for types which are not passed by +// value (e.g map, slice, chan) or structs that contain such types. +// +// This is used in conjunction with the global state feature to create proper +// copies of the state to allow the parser to properly restore the state in +// the case of backtracking. +type Cloner interface { + Clone() interface{} +} + +// clone and return parser current state. 
+func (p *parser) cloneState() storeDict { + if p.debug { + defer p.out(p.in("cloneState")) + } + + state := make(storeDict, len(p.cur.state)) + for k, v := range p.cur.state { + if c, ok := v.(Cloner); ok { + state[k] = c.Clone() + } else { + state[k] = v + } + } + return state +} + +// restore parser current state to the state storeDict. +// every restoreState should applied only one time for every cloned state +func (p *parser) restoreState(state storeDict) { + if p.debug { + defer p.out(p.in("restoreState")) + } + p.cur.state = state +} + +// get the slice of bytes from the savepoint start to the current position. +func (p *parser) sliceFrom(start savepoint) []byte { + return p.data[start.position.offset:p.pt.position.offset] +} + +func (p *parser) getMemoized(node interface{}) (resultTuple, bool) { + if len(p.memo) == 0 { + return resultTuple{}, false + } + m := p.memo[p.pt.offset] + if len(m) == 0 { + return resultTuple{}, false + } + res, ok := m[node] + return res, ok +} + +func (p *parser) setMemoized(pt savepoint, node interface{}, tuple resultTuple) { + if p.memo == nil { + p.memo = make(map[int]map[interface{}]resultTuple) + } + m := p.memo[pt.offset] + if m == nil { + m = make(map[interface{}]resultTuple) + p.memo[pt.offset] = m + } + m[node] = tuple +} + +func (p *parser) buildRulesTable(g *grammar) { + p.rules = make(map[string]*rule, len(g.rules)) + for _, r := range g.rules { + p.rules[r.name] = r + } +} + +func (p *parser) parse(g *grammar) (val interface{}, err error) { + if len(g.rules) == 0 { + p.addErr(errNoRule) + return nil, p.errs.err() + } + + // TODO : not super critical but this could be generated + p.buildRulesTable(g) + + if p.recover { + // panic can be used in action code to stop parsing immediately + // and return the panic as an error. 
+ defer func() { + if e := recover(); e != nil { + if p.debug { + defer p.out(p.in("panic handler")) + } + val = nil + switch e := e.(type) { + case error: + p.addErr(e) + default: + p.addErr(fmt.Errorf("%v", e)) + } + err = p.errs.err() + } + }() + } + + startRule, ok := p.rules[p.entrypoint] + if !ok { + p.addErr(errInvalidEntrypoint) + return nil, p.errs.err() + } + + p.read() // advance to first rune + val, ok = p.parseRule(startRule) + if !ok { + if len(*p.errs) == 0 { + // If parsing fails, but no errors have been recorded, the expected values + // for the farthest parser position are returned as error. + maxFailExpectedMap := make(map[string]struct{}, len(p.maxFailExpected)) + for _, v := range p.maxFailExpected { + maxFailExpectedMap[v] = struct{}{} + } + expected := make([]string, 0, len(maxFailExpectedMap)) + eof := false + if _, ok := maxFailExpectedMap["!."]; ok { + delete(maxFailExpectedMap, "!.") + eof = true + } + for k := range maxFailExpectedMap { + expected = append(expected, k) + } + sort.Strings(expected) + if eof { + expected = append(expected, "EOF") + } + p.addErrAt(errors.New("no match found, expected: "+listJoin(expected, ", ", "or")), p.maxFailPos, expected) + } + + return nil, p.errs.err() + } + return val, p.errs.err() +} + +func listJoin(list []string, sep string, lastSep string) string { + switch len(list) { + case 0: + return "" + case 1: + return list[0] + default: + return fmt.Sprintf("%s %s %s", strings.Join(list[:len(list)-1], sep), lastSep, list[len(list)-1]) + } +} + +func (p *parser) parseRule(rule *rule) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseRule " + rule.name)) + } + + if p.memoize { + res, ok := p.getMemoized(rule) + if ok { + p.restore(res.end) + return res.v, res.b + } + } + + start := p.pt + p.rstack = append(p.rstack, rule) + p.pushV() + val, ok := p.parseExpr(rule.expr) + p.popV() + p.rstack = p.rstack[:len(p.rstack)-1] + if ok && p.debug { + p.print(strings.Repeat(" ", p.depth)+"MATCH", 
string(p.sliceFrom(start))) + } + + if p.memoize { + p.setMemoized(start, rule, resultTuple{val, ok, p.pt}) + } + return val, ok +} + +func (p *parser) parseExpr(expr interface{}) (interface{}, bool) { + var pt savepoint + + if p.memoize { + res, ok := p.getMemoized(expr) + if ok { + p.restore(res.end) + return res.v, res.b + } + pt = p.pt + } + + p.ExprCnt++ + if p.ExprCnt > p.maxExprCnt { + panic(errMaxExprCnt) + } + + var val interface{} + var ok bool + switch expr := expr.(type) { + case *actionExpr: + val, ok = p.parseActionExpr(expr) + case *andCodeExpr: + val, ok = p.parseAndCodeExpr(expr) + case *andExpr: + val, ok = p.parseAndExpr(expr) + case *anyMatcher: + val, ok = p.parseAnyMatcher(expr) + case *charClassMatcher: + val, ok = p.parseCharClassMatcher(expr) + case *choiceExpr: + val, ok = p.parseChoiceExpr(expr) + case *labeledExpr: + val, ok = p.parseLabeledExpr(expr) + case *litMatcher: + val, ok = p.parseLitMatcher(expr) + case *notCodeExpr: + val, ok = p.parseNotCodeExpr(expr) + case *notExpr: + val, ok = p.parseNotExpr(expr) + case *oneOrMoreExpr: + val, ok = p.parseOneOrMoreExpr(expr) + case *recoveryExpr: + val, ok = p.parseRecoveryExpr(expr) + case *ruleRefExpr: + val, ok = p.parseRuleRefExpr(expr) + case *seqExpr: + val, ok = p.parseSeqExpr(expr) + case *stateCodeExpr: + val, ok = p.parseStateCodeExpr(expr) + case *throwExpr: + val, ok = p.parseThrowExpr(expr) + case *zeroOrMoreExpr: + val, ok = p.parseZeroOrMoreExpr(expr) + case *zeroOrOneExpr: + val, ok = p.parseZeroOrOneExpr(expr) + default: + panic(fmt.Sprintf("unknown expression type %T", expr)) + } + if p.memoize { + p.setMemoized(pt, expr, resultTuple{val, ok, p.pt}) + } + return val, ok +} + +func (p *parser) parseActionExpr(act *actionExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseActionExpr")) + } + + start := p.pt + val, ok := p.parseExpr(act.expr) + if ok { + p.cur.pos = start.position + p.cur.text = p.sliceFrom(start) + state := p.cloneState() + actVal, err := 
act.run(p) + if err != nil { + p.addErrAt(err, start.position, []string{}) + } + p.restoreState(state) + + val = actVal + } + if ok && p.debug { + p.print(strings.Repeat(" ", p.depth)+"MATCH", string(p.sliceFrom(start))) + } + return val, ok +} + +func (p *parser) parseAndCodeExpr(and *andCodeExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseAndCodeExpr")) + } + + state := p.cloneState() + + ok, err := and.run(p) + if err != nil { + p.addErr(err) + } + p.restoreState(state) + + return nil, ok +} + +func (p *parser) parseAndExpr(and *andExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseAndExpr")) + } + + pt := p.pt + state := p.cloneState() + p.pushV() + _, ok := p.parseExpr(and.expr) + p.popV() + p.restoreState(state) + p.restore(pt) + + return nil, ok +} + +func (p *parser) parseAnyMatcher(any *anyMatcher) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseAnyMatcher")) + } + + if p.pt.rn == utf8.RuneError && p.pt.w == 0 { + // EOF - see utf8.DecodeRune + p.failAt(false, p.pt.position, ".") + return nil, false + } + start := p.pt + p.read() + p.failAt(true, start.position, ".") + return p.sliceFrom(start), true +} + +func (p *parser) parseCharClassMatcher(chr *charClassMatcher) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseCharClassMatcher")) + } + + cur := p.pt.rn + start := p.pt + + // can't match EOF + if cur == utf8.RuneError && p.pt.w == 0 { // see utf8.DecodeRune + p.failAt(false, start.position, chr.val) + return nil, false + } + + if chr.ignoreCase { + cur = unicode.ToLower(cur) + } + + // try to match in the list of available chars + for _, rn := range chr.chars { + if rn == cur { + if chr.inverted { + p.failAt(false, start.position, chr.val) + return nil, false + } + p.read() + p.failAt(true, start.position, chr.val) + return p.sliceFrom(start), true + } + } + + // try to match in the list of ranges + for i := 0; i < len(chr.ranges); i += 2 { + if cur >= chr.ranges[i] && cur <= 
chr.ranges[i+1] { + if chr.inverted { + p.failAt(false, start.position, chr.val) + return nil, false + } + p.read() + p.failAt(true, start.position, chr.val) + return p.sliceFrom(start), true + } + } + + // try to match in the list of Unicode classes + for _, cl := range chr.classes { + if unicode.Is(cl, cur) { + if chr.inverted { + p.failAt(false, start.position, chr.val) + return nil, false + } + p.read() + p.failAt(true, start.position, chr.val) + return p.sliceFrom(start), true + } + } + + if chr.inverted { + p.read() + p.failAt(true, start.position, chr.val) + return p.sliceFrom(start), true + } + p.failAt(false, start.position, chr.val) + return nil, false +} + +func (p *parser) incChoiceAltCnt(ch *choiceExpr, altI int) { + choiceIdent := fmt.Sprintf("%s %d:%d", p.rstack[len(p.rstack)-1].name, ch.pos.line, ch.pos.col) + m := p.ChoiceAltCnt[choiceIdent] + if m == nil { + m = make(map[string]int) + p.ChoiceAltCnt[choiceIdent] = m + } + // We increment altI by 1, so the keys do not start at 0 + alt := strconv.Itoa(altI + 1) + if altI == choiceNoMatch { + alt = p.choiceNoMatch + } + m[alt]++ +} + +func (p *parser) parseChoiceExpr(ch *choiceExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseChoiceExpr")) + } + + for altI, alt := range ch.alternatives { + // dummy assignment to prevent compile error if optimized + _ = altI + + state := p.cloneState() + + p.pushV() + val, ok := p.parseExpr(alt) + p.popV() + if ok { + p.incChoiceAltCnt(ch, altI) + return val, ok + } + p.restoreState(state) + } + p.incChoiceAltCnt(ch, choiceNoMatch) + return nil, false +} + +func (p *parser) parseLabeledExpr(lab *labeledExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseLabeledExpr")) + } + + p.pushV() + val, ok := p.parseExpr(lab.expr) + p.popV() + if ok && lab.label != "" { + m := p.vstack[len(p.vstack)-1] + m[lab.label] = val + } + return val, ok +} + +func (p *parser) parseLitMatcher(lit *litMatcher) (interface{}, bool) { + if p.debug { + defer 
p.out(p.in("parseLitMatcher")) + } + + ignoreCase := "" + if lit.ignoreCase { + ignoreCase = "i" + } + val := fmt.Sprintf("%q%s", lit.val, ignoreCase) + start := p.pt + for _, want := range lit.val { + cur := p.pt.rn + if lit.ignoreCase { + cur = unicode.ToLower(cur) + } + if cur != want { + p.failAt(false, start.position, val) + p.restore(start) + return nil, false + } + p.read() + } + p.failAt(true, start.position, val) + return p.sliceFrom(start), true +} + +func (p *parser) parseNotCodeExpr(not *notCodeExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseNotCodeExpr")) + } + + state := p.cloneState() + + ok, err := not.run(p) + if err != nil { + p.addErr(err) + } + p.restoreState(state) + + return nil, !ok +} + +func (p *parser) parseNotExpr(not *notExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseNotExpr")) + } + + pt := p.pt + state := p.cloneState() + p.pushV() + p.maxFailInvertExpected = !p.maxFailInvertExpected + _, ok := p.parseExpr(not.expr) + p.maxFailInvertExpected = !p.maxFailInvertExpected + p.popV() + p.restoreState(state) + p.restore(pt) + + return nil, !ok +} + +func (p *parser) parseOneOrMoreExpr(expr *oneOrMoreExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseOneOrMoreExpr")) + } + + var vals []interface{} + + for { + p.pushV() + val, ok := p.parseExpr(expr.expr) + p.popV() + if !ok { + if len(vals) == 0 { + // did not match once, no match + return nil, false + } + return vals, true + } + vals = append(vals, val) + } +} + +func (p *parser) parseRecoveryExpr(recover *recoveryExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseRecoveryExpr (" + strings.Join(recover.failureLabel, ",") + ")")) + } + + p.pushRecovery(recover.failureLabel, recover.recoverExpr) + val, ok := p.parseExpr(recover.expr) + p.popRecovery() + + return val, ok +} + +func (p *parser) parseRuleRefExpr(ref *ruleRefExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseRuleRefExpr " + ref.name)) + } + 
+ if ref.name == "" { + panic(fmt.Sprintf("%s: invalid rule: missing name", ref.pos)) + } + + rule := p.rules[ref.name] + if rule == nil { + p.addErr(fmt.Errorf("undefined rule: %s", ref.name)) + return nil, false + } + return p.parseRule(rule) +} + +func (p *parser) parseSeqExpr(seq *seqExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseSeqExpr")) + } + + vals := make([]interface{}, 0, len(seq.exprs)) + + pt := p.pt + state := p.cloneState() + for _, expr := range seq.exprs { + val, ok := p.parseExpr(expr) + if !ok { + p.restoreState(state) + p.restore(pt) + return nil, false + } + vals = append(vals, val) + } + return vals, true +} + +func (p *parser) parseStateCodeExpr(state *stateCodeExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseStateCodeExpr")) + } + + err := state.run(p) + if err != nil { + p.addErr(err) + } + return nil, true +} + +func (p *parser) parseThrowExpr(expr *throwExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseThrowExpr")) + } + + for i := len(p.recoveryStack) - 1; i >= 0; i-- { + if recoverExpr, ok := p.recoveryStack[i][expr.label]; ok { + if val, ok := p.parseExpr(recoverExpr); ok { + return val, ok + } + } + } + + return nil, false +} + +func (p *parser) parseZeroOrMoreExpr(expr *zeroOrMoreExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseZeroOrMoreExpr")) + } + + var vals []interface{} + + for { + p.pushV() + val, ok := p.parseExpr(expr.expr) + p.popV() + if !ok { + return vals, true + } + vals = append(vals, val) + } +} + +func (p *parser) parseZeroOrOneExpr(expr *zeroOrOneExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseZeroOrOneExpr")) + } + + p.pushV() + val, _ := p.parseExpr(expr.expr) + p.popV() + // whether it matched or not, consider it a match + return val, true +} diff --git a/here/internal/pathparser/parser.peg b/here/internal/pathparser/parser.peg new file mode 100644 index 0000000..e0eaee1 --- /dev/null +++ 
b/here/internal/pathparser/parser.peg
@@ -0,0 +1,37 @@
+{
+    package pathparser
+}
+
+DOC <- pkg:PKG? n:NAME? (nl / EOF) {
+    return toPath(pkg, n)
+}
+
+PKG <- !"/" n:STRING v:(VERSION)? ":"? {
+    return toPackage(n, v)
+}
+
+NAME <- "/" n:STRING {
+    return toName(n)
+}
+
+VERSION <- "@" ( "v" NUMBER "." NUMBER "." NUMBER ) {
+    return strings.TrimPrefix(string(c.text), "@"), nil
+}
+
+STRING <- ([^@:\\\n]+) {
+    return strings.TrimSpace(string(c.text)), nil
+}
+
+NUMBER <- [0-9]+ {
+    return strconv.Atoi(strings.TrimSpace(string(c.text)))
+}
+
+_ "whitespace" <- [ \t]*
+
+nl "newline" <- [\n\r]+
+
+EOF <- !. {
+    return nil, nil
+}
+
+
diff --git a/here/internal/pathparser/parser_test.go b/here/internal/pathparser/parser_test.go
new file mode 100644
index 0000000..6d6bcdb
--- /dev/null
+++ b/here/internal/pathparser/parser_test.go
@@ -0,0 +1,89 @@
+package pathparser
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func Test_Parser(t *testing.T) {
+	table := []struct {
+		in  string
+		exp Path
+		err bool
+	}{
+		{
+			in: "/a/b/c.txt",
+			exp: Path{
+				Name: "/a/b/c.txt",
+			},
+		},
+		{
+			in: "github.com/markbates/pkger:/a/b/c.txt",
+			exp: Path{
+				Pkg: &Package{
+					Name: "github.com/markbates/pkger",
+				},
+				Name: "/a/b/c.txt"},
+		},
+		{
+			in: "github.com/markbates/pkger@v1.0.0:/a/b/c.txt",
+			exp: Path{
+				Pkg: &Package{
+					Name:    "github.com/markbates/pkger",
+					Version: "v1.0.0",
+				},
+				Name: "/a/b/c.txt"},
+		},
+		{
+			in: "github.com/markbates/pkger@v1.0.0",
+			exp: Path{
+				Pkg: &Package{
+					Name:    "github.com/markbates/pkger",
+					Version: "v1.0.0",
+				},
+				Name: "/",
+			},
+		},
+		{
+			in: "github.com/markbates/pkger",
+			exp: Path{
+				Pkg: &Package{
+					Name: "github.com/markbates/pkger",
+				},
+				Name: "/",
+			},
+		},
+		{
+			in: "app",
+			exp: Path{
+				Pkg: &Package{
+					Name: "app",
+				},
+				Name: "/",
+			},
+		},
+	}
+
+	for _, tt := range table {
+
+		t.Run(tt.in, func(st *testing.T) {
+			r := require.New(st)
+
+			res, err := Parse(tt.in, []byte(tt.in))
+
+			if tt.err {
+
r.Error(err) + return + } + + r.NoError(err) + + pt, ok := res.(*Path) + r.True(ok) + + r.Equal(&tt.exp, pt) + }) + } + +} diff --git a/here/internal/pathparser/path.go b/here/internal/pathparser/path.go new file mode 100644 index 0000000..7b3e98e --- /dev/null +++ b/here/internal/pathparser/path.go @@ -0,0 +1,15 @@ +package pathparser + +type Path struct { + Pkg *Package + Name string +} + +func (p Path) IsZero() bool { + return p.Pkg == nil && len(p.Name) == 0 +} + +type Package struct { + Name string + Version string +} diff --git a/here/parse.go b/here/parse.go new file mode 100644 index 0000000..c005bb9 --- /dev/null +++ b/here/parse.go @@ -0,0 +1,44 @@ +package here + +import ( + "fmt" + "strings" + + "github.com/markbates/pkger/here/internal/pathparser" +) + +func (i Info) Parse(p string) (Path, error) { + pt := Path{ + Pkg: i.ImportPath, + Name: "/", + } + + res, err := pathparser.Parse(p, []byte(p)) + if err != nil { + return pt, err + } + + pp, ok := res.(*pathparser.Path) + if !ok { + return pt, fmt.Errorf("expected Path, got %T", res) + } + + if pp.Pkg != nil { + pt.Pkg = pp.Pkg.Name + } + + pt.Name = pp.Name + + her := i + if pt.Pkg != i.ImportPath { + her, err = Package(pt.Pkg) + if err != nil { + return pt, err + } + } + + pt.Name = strings.TrimPrefix(pt.Name, her.Dir) + pt.Name = strings.ReplaceAll(pt.Name, "\\", "/") + + return pt, nil +} diff --git a/pkger_test.go b/pkger_test.go index 221837f..bf02289 100644 --- a/pkger_test.go +++ b/pkger_test.go @@ -1,27 +1,34 @@ -package pkger +package pkger_test import ( + "fmt" "os" "path/filepath" "testing" + "github.com/markbates/pkger" "github.com/markbates/pkger/here" + "github.com/markbates/pkger/pkging/pkgtest" "github.com/stretchr/testify/require" ) func Test_Parse(t *testing.T) { r := require.New(t) - pt, err := Parse("github.com/rocket/ship:/little") + app, err := pkgtest.App() r.NoError(err) - r.Equal("github.com/rocket/ship", pt.Pkg) - r.Equal("/little", pt.Name) + + pt, err := 
pkger.Parse(fmt.Sprintf("%s:/public/index.html", app.Info.ImportPath)) + + r.NoError(err) + r.Equal(app.Info.ImportPath, pt.Pkg) + r.Equal("/public/index.html", pt.Name) } func Test_Abs(t *testing.T) { r := require.New(t) - s, err := Abs(":/rocket.ship") + s, err := pkger.Abs("/rocket.ship") r.NoError(err) pwd, err := os.Getwd() @@ -32,7 +39,7 @@ func Test_Abs(t *testing.T) { func Test_AbsPath(t *testing.T) { r := require.New(t) - s, err := AbsPath(here.Path{ + s, err := pkger.AbsPath(here.Path{ Pkg: "github.com/markbates/pkger", Name: "/rocket.ship", }) @@ -46,7 +53,7 @@ func Test_AbsPath(t *testing.T) { func Test_Current(t *testing.T) { r := require.New(t) - info, err := Current() + info, err := pkger.Current() r.NoError(err) r.Equal("github.com/markbates/pkger", info.ImportPath) } @@ -54,7 +61,7 @@ func Test_Current(t *testing.T) { func Test_Info(t *testing.T) { r := require.New(t) - info, err := Info("github.com/markbates/pkger") + info, err := pkger.Info("github.com/markbates/pkger") r.NoError(err) r.Equal("github.com/markbates/pkger", info.ImportPath) } @@ -62,9 +69,9 @@ func Test_Info(t *testing.T) { func Test_Create(t *testing.T) { r := require.New(t) - MkdirAll("/tmp", 0755) - defer RemoveAll("/tmp") - f, err := Create("/tmp/test.create") + pkger.MkdirAll("/tmp", 0755) + defer pkger.RemoveAll("/tmp") + f, err := pkger.Create("/tmp/test.create") r.NoError(err) r.Equal("/tmp/test.create", f.Name()) r.NoError(f.Close()) @@ -73,12 +80,12 @@ func Test_Create(t *testing.T) { func Test_MkdirAll(t *testing.T) { r := require.New(t) - _, err := Open("/tmp") + _, err := pkger.Open("/tmp") r.Error(err) - r.NoError(MkdirAll("/tmp", 0755)) - defer RemoveAll("/tmp") + r.NoError(pkger.MkdirAll("/tmp", 0755)) + defer pkger.RemoveAll("/tmp") - f, err := Open("/tmp") + f, err := pkger.Open("/tmp") r.NoError(err) r.Equal("/tmp", f.Name()) r.NoError(f.Close()) @@ -87,7 +94,7 @@ func Test_MkdirAll(t *testing.T) { func Test_Stat(t *testing.T) { r := require.New(t) - info, err := 
Stat("/go.mod") + info, err := pkger.Stat("/go.mod") r.NoError(err) r.Equal("/go.mod", info.Name()) } @@ -96,7 +103,7 @@ func Test_Walk(t *testing.T) { r := require.New(t) files := map[string]os.FileInfo{} - err := Walk("/pkging/pkgtest/internal/testdata/app", func(path string, info os.FileInfo, err error) error { + err := pkger.Walk("/pkging/pkgtest/internal/testdata/app", func(path string, info os.FileInfo, err error) error { if err != nil { return err } @@ -111,14 +118,14 @@ func Test_Walk(t *testing.T) { func Test_Remove(t *testing.T) { r := require.New(t) - MkdirAll("/tmp", 0755) - defer RemoveAll("/tmp") - f, err := Create("/tmp/test.create") + pkger.MkdirAll("/tmp", 0755) + defer pkger.RemoveAll("/tmp") + f, err := pkger.Create("/tmp/test.create") r.NoError(err) r.Equal("/tmp/test.create", f.Name()) r.NoError(f.Close()) - r.NoError(Remove("/tmp/test.create")) + r.NoError(pkger.Remove("/tmp/test.create")) - _, err = Stat("/tmp/test.create") + _, err = pkger.Stat("/tmp/test.create") r.Error(err) } diff --git a/pkging/pkgtest/file.go b/pkging/pkgtest/file.go index 81e018f..0ba8107 100644 --- a/pkging/pkgtest/file.go +++ b/pkging/pkgtest/file.go @@ -15,15 +15,16 @@ func (s Suite) Test_File_Info(t *testing.T) { pkg, err := s.Make() r.NoError(err) - cur, err := pkg.Current() + app, err := App() r.NoError(err) - ip := cur.ImportPath + ip := app.Info.ImportPath + mould := "/public/index.html" + table := []struct { in string }{ {in: mould}, - {in: ":" + mould}, {in: ip + ":" + mould}, } @@ -39,7 +40,7 @@ func (s Suite) Test_File_Info(t *testing.T) { f, err := pkg.Open(tt.in) r.NoError(err) r.Equal(mould, f.Name()) - r.Equal(cur.ImportPath, f.Info().ImportPath) + r.Equal(ip, f.Info().ImportPath) r.NoError(f.Close()) }) } @@ -62,7 +63,7 @@ func (s Suite) Test_File_Readdir(t *testing.T) { table := []struct { in string }{ - {in: ":/public"}, + {in: "/public"}, {in: ip + ":/public"}, } diff --git a/pkging/pkgtest/http.go b/pkging/pkgtest/http.go index daadaed..becd3ee 
100644 --- a/pkging/pkgtest/http.go +++ b/pkging/pkgtest/http.go @@ -113,7 +113,6 @@ func (s Suite) Test_HTTP(t *testing.T) { in string }{ {in: "/public"}, - {in: ":" + "/public"}, {in: ip + ":" + "/public"}, } diff --git a/pkging/pkgtest/suite.go b/pkging/pkgtest/suite.go index a4d00cd..1c7f2ff 100644 --- a/pkging/pkgtest/suite.go +++ b/pkging/pkgtest/suite.go @@ -17,10 +17,6 @@ import ( "github.com/stretchr/testify/require" ) -const mould = "/easy/listening/sugar.file" -const hart = "/easy/listening/grant.hart" -const husker = "github.com/husker/du" - type Suite struct { Name string gen func() (pkging.Pkger, error) @@ -58,6 +54,7 @@ func (s Suite) Test(t *testing.T) { } func (s Suite) Run(t *testing.T, name string, fn func(t *testing.T)) { + t.Helper() t.Run(name, func(st *testing.T) { fn(st) }) @@ -79,16 +76,14 @@ func (s Suite) Test_Create(t *testing.T) { pkg, err := s.Make() r.NoError(err) - cur, err := pkg.Current() + app, err := App() r.NoError(err) - ip := cur.ImportPath table := []struct { in string }{ - {in: mould}, - {in: ":" + mould}, - {in: ip + ":" + mould}, + {in: "/public/index.html"}, + {in: app.Info.ImportPath + ":" + "/public/index.html"}, } for _, tt := range table { @@ -118,21 +113,18 @@ func (s Suite) Test_Create(t *testing.T) { func (s Suite) Test_Create_No_MkdirAll(t *testing.T) { r := require.New(t) - pkg, err := s.Make() + app, err := App() r.NoError(err) - cur, err := pkg.Current() - r.NoError(err) + ip := app.Info.ImportPath + mould := "/easy/listening/file.under" - ip := cur.ImportPath table := []struct { in string }{ {in: mould}, - {in: ":" + mould}, {in: ip + ":" + mould}, {in: filepath.Dir(mould)}, - {in: ":" + filepath.Dir(mould)}, {in: ip + ":" + filepath.Dir(mould)}, } @@ -181,21 +173,18 @@ func (s Suite) Test_Info(t *testing.T) { func (s Suite) Test_MkdirAll(t *testing.T) { r := require.New(t) - pkg, err := s.Make() + app, err := App() r.NoError(err) - cur, err := pkg.Current() - r.NoError(err) + ip := app.Info.ImportPath + mould 
:= "/public/index.html" - ip := cur.ImportPath table := []struct { in string }{ {in: mould}, - {in: ":" + mould}, {in: ip + ":" + mould}, {in: filepath.Dir(mould)}, - {in: ":" + filepath.Dir(mould)}, {in: ip + ":" + filepath.Dir(mould)}, } @@ -228,20 +217,17 @@ func (s Suite) Test_MkdirAll(t *testing.T) { func (s Suite) Test_Open_File(t *testing.T) { r := require.New(t) - pkg, err := s.Make() + app, err := App() r.NoError(err) - cur, err := pkg.Current() - r.NoError(err) + ip := app.Info.ImportPath + mould := "/public/index.html" - ip := cur.ImportPath table := []struct { in string }{ {in: mould}, - {in: ":" + mould}, {in: ip + ":" + mould}, - {in: hart}, } for _, tt := range table { @@ -285,22 +271,17 @@ func (s Suite) Test_Parse(t *testing.T) { r.NoError(err) cur, err := pkg.Current() - r.NoError(err) - ip := cur.ImportPath + mould := "/public/index.html" + table := []struct { in string exp here.Path }{ {in: mould, exp: here.Path{Pkg: ip, Name: mould}}, {in: filepath.Join(cur.Dir, mould), exp: here.Path{Pkg: ip, Name: mould}}, - {in: ":" + mould, exp: here.Path{Pkg: ip, Name: mould}}, {in: ip + ":" + mould, exp: here.Path{Pkg: ip, Name: mould}}, {in: ip, exp: here.Path{Pkg: ip, Name: "/"}}, - {in: ":", exp: here.Path{Pkg: ip, Name: "/"}}, - {in: husker + ":" + mould, exp: here.Path{Pkg: husker, Name: mould}}, - {in: husker, exp: here.Path{Pkg: husker, Name: "/"}}, - {in: husker + ":", exp: here.Path{Pkg: husker, Name: "/"}}, } for _, tt := range table { @@ -320,19 +301,18 @@ func (s Suite) Test_Stat_Error(t *testing.T) { pkg, err := s.Make() r.NoError(err) - cur, err := pkg.Current() + app, err := App() r.NoError(err) - ip := cur.ImportPath + ip := app.Info.ImportPath table := []struct { in string }{ - {in: hart}, - {in: ":" + hart}, + {in: "/dontexist"}, {in: ip}, {in: ip + ":"}, - {in: ip + ":" + hart}, + {in: ip + ":" + "/dontexist"}, } for _, tt := range table { @@ -357,18 +337,17 @@ func (s Suite) Test_Stat_Dir(t *testing.T) { pkg, err := s.Make() 
r.NoError(err) - cur, err := pkg.Current() + app, err := App() r.NoError(err) - dir := filepath.Dir(mould) - ip := cur.ImportPath + ip := app.Info.ImportPath + dir := app.Paths.Public[1] table := []struct { in string }{ {in: ip}, {in: dir}, - {in: ":" + dir}, {in: ip + ":" + dir}, } @@ -393,20 +372,17 @@ func (s Suite) Test_Stat_Dir(t *testing.T) { func (s Suite) Test_Stat_File(t *testing.T) { r := require.New(t) - pkg, err := s.Make() + app, err := App() r.NoError(err) - cur, err := pkg.Current() - r.NoError(err) + ip := app.Info.ImportPath + mould := "/public/index.html" - ip := cur.ImportPath table := []struct { in string }{ {in: mould}, - {in: ":" + mould}, {in: ip + ":" + mould}, - {in: hart}, } for _, tt := range table { @@ -517,7 +493,6 @@ func (s Suite) Test_Remove(t *testing.T) { in string }{ {in: "/public/images/img1.png"}, - {in: ":/public/images/img1.png"}, {in: ip + ":/public/images/img1.png"}, } @@ -558,7 +533,6 @@ func (s Suite) Test_RemoveAll(t *testing.T) { in string }{ {in: "/public"}, - {in: ":/public"}, {in: ip + ":/public"}, } diff --git a/pkging/pkgtest/util.go b/pkging/pkgtest/util.go index 6386183..666e9ab 100644 --- a/pkging/pkgtest/util.go +++ b/pkging/pkgtest/util.go @@ -13,20 +13,17 @@ import ( func (s Suite) Test_Util_ReadFile(t *testing.T) { r := require.New(t) - pkg, err := s.Make() + app, err := App() r.NoError(err) - cur, err := pkg.Current() - r.NoError(err) + ip := app.Info.ImportPath + mould := "/public/index.html" - ip := cur.ImportPath table := []struct { in string }{ {in: mould}, - {in: ":" + mould}, {in: ip + ":" + mould}, - {in: hart}, } for _, tt := range table {