package parse

import (
	"bufio"
	"bytes"
	"fmt"
	"github.com/yuin/gopher-lua/ast"
	"io"
	"reflect"
	"strconv"
	"strings"
)

// EOF is the sentinel returned at end of input.
const EOF = -1

// Bit masks of characters skipped as whitespace: whitespace1 covers tab and
// space, whitespace2 additionally covers CR and LF.
const whitespace1 = 1<<'\t' | 1<<' '
const whitespace2 = 1<<'\t' | 1<<'\n' | 1<<'\r' | 1<<' '

// Error is a scan/parse error with its source position and offending token.
type Error struct {
	Pos     ast.Position
	Message string
	Token   string
}

func (e *Error) Error() string {
	pos := e.Pos
	if pos.Line == EOF {
		return fmt.Sprintf("%v at EOF: %s\n", pos.Source, e.Message)
	} else {
		return fmt.Sprintf("%v line:%d(column:%d) near '%v': %s\n", pos.Source, pos.Line, pos.Column, e.Token, e.Message)
	}
}

func writeChar(buf *bytes.Buffer, c int) { buf.WriteByte(byte(c)) }

func isDecimal(ch int) bool { return '0' <= ch && ch <= '9' }

func isIdent(ch int, pos int) bool {
	return ch == '_' || 'A' <= ch && ch <= 'Z' || 'a' <= ch && ch <= 'z' || isDecimal(ch) && pos > 0
}

func isDigit(ch int) bool {
	return '0' <= ch && ch <= '9' || 'a' <= ch && ch <= 'f' || 'A' <= ch && ch <= 'F'
}

// Scanner reads Lua source one byte at a time and tracks the current position.
type Scanner struct {
	Pos    ast.Position
	reader *bufio.Reader
}

func NewScanner(reader io.Reader, source string) *Scanner {
	return &Scanner{
		Pos:    ast.Position{source, 1, 0},
		reader: bufio.NewReaderSize(reader, 4096),
	}
}

func (sc *Scanner) Error(tok string, msg string) *Error { return &Error{sc.Pos, msg, tok} }

func (sc *Scanner) TokenError(tok ast.Token, msg string) *Error { return &Error{tok.Pos, msg, tok.Str} }

func (sc *Scanner) readNext() int {
	ch, err := sc.reader.ReadByte()
	if err == io.EOF {
		return EOF
	}
	return int(ch)
}

// Newline advances the position to the next line, folding "\r\n" and "\n\r"
// pairs into a single line break.
func (sc *Scanner) Newline(ch int) {
	if ch < 0 {
		return
	}
	sc.Pos.Line += 1
	sc.Pos.Column = 0
	next := sc.Peek()
	if ch == '\n' && next == '\r' || ch == '\r' && next == '\n' {
		sc.reader.ReadByte()
	}
}

func (sc *Scanner) Next() int {
	ch := sc.readNext()
	switch ch {
	case '\n', '\r':
		sc.Newline(ch)
		ch = int('\n')
	case EOF:
		sc.Pos.Line = EOF
		sc.Pos.Column = 0
	default:
		sc.Pos.Column++
	}
	return ch
}

func (sc *Scanner) Peek() int {
	ch := sc.readNext()
	if ch != EOF {
		sc.reader.UnreadByte()
	}
	return ch
}

// skipWhiteSpace consumes characters whose bits are set in whitespace and
// returns the first character that is not.
func (sc *Scanner) skipWhiteSpace(whitespace int64) int {
	ch := sc.Next()
	for ; whitespace&(1<<uint(ch)) != 0; ch = sc.Next() {
	}
	return ch
}

// ... (the remaining scanner helpers and the beginning of Scan's token switch
// are omitted; it resumes below at the '>' case) ...

		case '>':
			if sc.Peek() == '=' {
				tok.Type = TGte
				tok.Str = ">="
				sc.Next()
			} else {
				tok.Type = ch
				tok.Str = string(ch)
			}
		case '.':
			ch2 := sc.Peek()
			switch {
			case isDecimal(ch2):
				tok.Type = TNumber
				err = sc.scanNumber(ch, buf)
				tok.Str = buf.String()
			case ch2 == '.':
				writeChar(buf, ch)
				writeChar(buf, sc.Next())
				if sc.Peek() == '.' {
					writeChar(buf, sc.Next())
					tok.Type = T3Comma
				} else {
					tok.Type = T2Comma
				}
			default:
				tok.Type = '.'
			}
			tok.Str = buf.String()
		case '+', '*', '/', '%', '^', '#', '(', ')', '{', '}', ']', ';', ':', ',':
			tok.Type = ch
			tok.Str = string(ch)
		default:
			writeChar(buf, ch)
			err = sc.Error(buf.String(), "Invalid token")
			goto finally
		}
	}
finally:
	tok.Name = TokenName(int(tok.Type))
	return tok, err
}

// yacc interface {{{

// Lexer adapts Scanner to the interface expected by the yacc-generated parser.
type Lexer struct {
	scanner       *Scanner
	Stmts         []ast.Stmt
	PNewLine      bool
	Token         ast.Token
	PrevTokenType int
}

func (lx *Lexer) Lex(lval *yySymType) int {
	lx.PrevTokenType = lx.Token.Type
	tok, err := lx.scanner.Scan(lx)
	if err != nil {
		panic(err)
	}
	if tok.Type < 0 {
		return 0
	}
	lval.token = tok
	lx.Token = tok
	return int(tok.Type)
}

func (lx *Lexer) Error(message string) { panic(lx.scanner.Error(lx.Token.Str, message)) }

func (lx *Lexer) TokenError(tok ast.Token, message string) { panic(lx.scanner.TokenError(tok, message)) }

// Parse scans and parses Lua source from reader and returns the statements of
// the chunk. Errors raised by the lexer or parser via panic are recovered and
// returned as err.
func Parse(reader io.Reader, name string) (chunk []ast.Stmt, err error) {
	lexer := &Lexer{NewScanner(reader, name), nil, false, ast.Token{Str: ""}, TNil}
	chunk = nil
	defer func() {
		if e := recover(); e != nil {
			err, _ = e.(error)
		}
	}()
	yyParse(lexer)
	chunk = lexer.Stmts
	return
}

// }}}

// Dump {{{

// isInlineDumpNode reports whether a value can be rendered on a single line.
func isInlineDumpNode(rv reflect.Value) bool {
	switch rv.Kind() {
	case reflect.Struct, reflect.Slice, reflect.Interface, reflect.Ptr:
		return false
	default:
		return true
	}
}

// dump renders an AST node as an indented tree, skipping struct fields whose
// name contains "Base".
func dump(node interface{}, level int, s string) string {
	rt := reflect.TypeOf(node)
	if fmt.Sprint(rt) == "<nil>" {
		return strings.Repeat(s, level) + "<nil>"
	}

	rv := reflect.ValueOf(node)
	buf := []string{}
	switch rt.Kind() {
	case reflect.Slice:
		if rv.Len() == 0 {
			return strings.Repeat(s, level) + "<empty>"
		}
		for i := 0; i < rv.Len(); i++ {
			buf = append(buf, dump(rv.Index(i).Interface(), level, s))
		}
	case reflect.Ptr:
		vt := rv.Elem()
		tt := rt.Elem()
		indicies := []int{}
		for i := 0; i < tt.NumField(); i++ {
			if strings.Index(tt.Field(i).Name, "Base") > -1 {
				continue
			}
			indicies = append(indicies, i)
		}
		switch {
		case len(indicies) == 0:
			return strings.Repeat(s, level) + "<empty>"
		case len(indicies) == 1 && isInlineDumpNode(vt.Field(indicies[0])):
			for _, i := range indicies {
				buf = append(buf, strings.Repeat(s, level)+"- Node$"+tt.Name()+": "+dump(vt.Field(i).Interface(), 0, s))
			}
		default:
			buf = append(buf, strings.Repeat(s, level)+"- Node$"+tt.Name())
			for _, i := range indicies {
				if isInlineDumpNode(vt.Field(i)) {
					inf := dump(vt.Field(i).Interface(), 0, s)
					buf = append(buf, strings.Repeat(s, level+1)+tt.Field(i).Name+": "+inf)
				} else {
					buf = append(buf, strings.Repeat(s, level+1)+tt.Field(i).Name+": ")
					buf = append(buf, dump(vt.Field(i).Interface(), level+2, s))
				}
			}
		}
	default:
		buf = append(buf, strings.Repeat(s, level)+fmt.Sprint(node))
	}
	return strings.Join(buf, "\n")
}

// Dump returns a printable tree representation of a parsed chunk.
func Dump(chunk []ast.Stmt) string {
	return dump(chunk, 0, " ")
}

// }}}
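
// The sketch below is illustrative only and not part of the upstream file: it
// shows how Parse and Dump are typically combined. The function name
// exampleParseAndDump, the inline Lua snippet, and the chunk name
// "example.lua" are made-up values for demonstration.
func exampleParseAndDump() {
	// Parse a small Lua chunk from an in-memory reader.
	chunk, err := Parse(strings.NewReader(`print("hello")`), "example.lua")
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	// Print the resulting AST as an indented tree.
	fmt.Println(Dump(chunk))
}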