Mirror of https://github.com/robertkrimen/otto (synced 2025-10-05 19:19:10 +08:00)
chore: rename _parser (#483)
Rename _parser -> parser in the occurrences missed in the previous refactor.
This commit is contained in:
parent ddcbf14a26
commit 5d81e9e02d
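
The change is purely mechanical: the unexported _parser type and its method receivers become parser, since a lowercase identifier is already package-private in Go and the leading underscore adds no extra privacy (it is also the kind of name linters such as golint complain about). A minimal sketch of the naming pattern, using made-up fields rather than the real struct:

package scratch

// parser is unexported because it starts with a lowercase letter, so the
// old leading underscore in _parser added no extra privacy. This is an
// illustrative stand-in, not the real otto type.
type parser struct {
	str    string
	offset int
}

// peek mirrors the receiver style used throughout the diff below:
// (p *parser) rather than the pre-rename (p *_parser).
func (p *parser) peek() byte {
	if p.offset+1 < len(p.str) {
		return p.str[p.offset+1]
	}
	return 0
}
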
File diff suppressed because it is too large
@@ -68,7 +68,7 @@ func (e Error) Error() string {
 )
 }
 
-func (p *_parser) error(place interface{}, msg string, msgValues ...interface{}) {
+func (p *parser) error(place interface{}, msg string, msgValues ...interface{}) {
 var idx file.Idx
 switch place := place.(type) {
 case int:
@@ -88,7 +88,7 @@ func (p *_parser) error(place interface{}, msg string, msgValues ...interface{})
 p.errors.Add(position, msg)
 }
 
-func (p *_parser) errorUnexpected(idx file.Idx, chr rune) {
+func (p *parser) errorUnexpected(idx file.Idx, chr rune) {
 if chr == -1 {
 p.error(idx, errUnexpectedEndOfInput)
 return
@@ -96,7 +96,7 @@ func (p *_parser) errorUnexpected(idx file.Idx, chr rune) {
 p.error(idx, errUnexpectedToken, token.ILLEGAL)
 }
 
-func (p *_parser) errorUnexpectedToken(tkn token.Token) {
+func (p *parser) errorUnexpectedToken(tkn token.Token) {
 if tkn == token.EOF {
 p.error(file.Idx(0), errUnexpectedEndOfInput)
 return

@@ -8,7 +8,7 @@ import (
 "github.com/robertkrimen/otto/token"
 )
 
-func (p *_parser) parseIdentifier() *ast.Identifier {
+func (p *parser) parseIdentifier() *ast.Identifier {
 literal := p.literal
 idx := p.idx
 if p.mode&StoreComments != 0 {
@@ -27,7 +27,7 @@ func (p *_parser) parseIdentifier() *ast.Identifier {
 return exp
 }
 
-func (p *_parser) parsePrimaryExpression() ast.Expression {
+func (p *parser) parsePrimaryExpression() ast.Expression {
 literal := p.literal
 idx := p.idx
 switch p.token {
@@ -118,7 +118,7 @@ func (p *_parser) parsePrimaryExpression() ast.Expression {
 return &ast.BadExpression{From: idx, To: p.idx}
 }
 
-func (p *_parser) parseRegExpLiteral() *ast.RegExpLiteral {
+func (p *parser) parseRegExpLiteral() *ast.RegExpLiteral {
 offset := p.chrOffset - 1 // Opening slash already gotten
 if p.token == token.QUOTIENT_ASSIGN {
 offset-- // =
@@ -170,7 +170,7 @@ func (p *_parser) parseRegExpLiteral() *ast.RegExpLiteral {
 }
 }
 
-func (p *_parser) parseVariableDeclaration(declarationList *[]*ast.VariableExpression) ast.Expression {
+func (p *parser) parseVariableDeclaration(declarationList *[]*ast.VariableExpression) ast.Expression {
 if p.token != token.IDENTIFIER {
 idx := p.expect(token.IDENTIFIER)
 p.nextStatement()
@@ -203,7 +203,7 @@ func (p *_parser) parseVariableDeclaration(declarationList *[]*ast.VariableExpre
 return node
 }
 
-func (p *_parser) parseVariableDeclarationList(idx file.Idx) []ast.Expression {
+func (p *parser) parseVariableDeclarationList(idx file.Idx) []ast.Expression {
 var declarationList []*ast.VariableExpression // Avoid bad expressions
 var list []ast.Expression
 
@@ -230,7 +230,7 @@ func (p *_parser) parseVariableDeclarationList(idx file.Idx) []ast.Expression {
 return list
 }
 
-func (p *_parser) parseObjectPropertyKey() (string, string) {
+func (p *parser) parseObjectPropertyKey() (string, string) {
 idx, tkn, literal := p.idx, p.token, p.literal
 value := ""
 if p.mode&StoreComments != 0 {
@@ -264,7 +264,7 @@ func (p *_parser) parseObjectPropertyKey() (string, string) {
 return literal, value
 }
 
-func (p *_parser) parseObjectProperty() ast.Property {
+func (p *parser) parseObjectProperty() ast.Property {
 literal, value := p.parseObjectPropertyKey()
 if literal == "get" && p.token != token.COLON {
 idx := p.idx
@@ -315,7 +315,7 @@ func (p *_parser) parseObjectProperty() ast.Property {
 return exp
 }
 
-func (p *_parser) parseObjectLiteral() ast.Expression {
+func (p *parser) parseObjectLiteral() ast.Expression {
 var value []ast.Property
 idx0 := p.expect(token.LEFT_BRACE)
 for p.token != token.RIGHT_BRACE && p.token != token.EOF {
@@ -340,7 +340,7 @@ func (p *_parser) parseObjectLiteral() ast.Expression {
 }
 }
 
-func (p *_parser) parseArrayLiteral() ast.Expression {
+func (p *parser) parseArrayLiteral() ast.Expression {
 idx0 := p.expect(token.LEFT_BRACKET)
 var value []ast.Expression
 for p.token != token.RIGHT_BRACKET && p.token != token.EOF {
@@ -379,7 +379,7 @@ func (p *_parser) parseArrayLiteral() ast.Expression {
 }
 }
 
-func (p *_parser) parseArgumentList() (argumentList []ast.Expression, idx0, idx1 file.Idx) { //nolint: nonamedreturns
+func (p *parser) parseArgumentList() (argumentList []ast.Expression, idx0, idx1 file.Idx) { //nolint: nonamedreturns
 if p.mode&StoreComments != 0 {
 p.comments.Unset()
 }
@@ -407,7 +407,7 @@ func (p *_parser) parseArgumentList() (argumentList []ast.Expression, idx0, idx1
 return
 }
 
-func (p *_parser) parseCallExpression(left ast.Expression) ast.Expression {
+func (p *parser) parseCallExpression(left ast.Expression) ast.Expression {
 argumentList, idx0, idx1 := p.parseArgumentList()
 exp := &ast.CallExpression{
 Callee: left,
@@ -422,7 +422,7 @@ func (p *_parser) parseCallExpression(left ast.Expression) ast.Expression {
 return exp
 }
 
-func (p *_parser) parseDotMember(left ast.Expression) ast.Expression {
+func (p *parser) parseDotMember(left ast.Expression) ast.Expression {
 period := p.expect(token.PERIOD)
 
 literal := p.literal
@@ -445,7 +445,7 @@ func (p *_parser) parseDotMember(left ast.Expression) ast.Expression {
 }
 }
 
-func (p *_parser) parseBracketMember(left ast.Expression) ast.Expression {
+func (p *parser) parseBracketMember(left ast.Expression) ast.Expression {
 idx0 := p.expect(token.LEFT_BRACKET)
 member := p.parseExpression()
 idx1 := p.expect(token.RIGHT_BRACKET)
@@ -457,7 +457,7 @@ func (p *_parser) parseBracketMember(left ast.Expression) ast.Expression {
 }
 }
 
-func (p *_parser) parseNewExpression() ast.Expression {
+func (p *parser) parseNewExpression() ast.Expression {
 idx := p.expect(token.NEW)
 callee := p.parseLeftHandSideExpression()
 node := &ast.NewExpression{
@@ -478,7 +478,7 @@ func (p *_parser) parseNewExpression() ast.Expression {
 return node
 }
 
-func (p *_parser) parseLeftHandSideExpression() ast.Expression {
+func (p *parser) parseLeftHandSideExpression() ast.Expression {
 var left ast.Expression
 if p.token == token.NEW {
 left = p.parseNewExpression()
@@ -506,7 +506,7 @@ func (p *_parser) parseLeftHandSideExpression() ast.Expression {
 }
 }
 
-func (p *_parser) parseLeftHandSideExpressionAllowCall() ast.Expression {
+func (p *parser) parseLeftHandSideExpressionAllowCall() ast.Expression {
 allowIn := p.scope.allowIn
 p.scope.allowIn = true
 defer func() {
@@ -551,7 +551,7 @@ func (p *_parser) parseLeftHandSideExpressionAllowCall() ast.Expression {
 }
 }
 
-func (p *_parser) parsePostfixExpression() ast.Expression {
+func (p *parser) parsePostfixExpression() ast.Expression {
 operand := p.parseLeftHandSideExpressionAllowCall()
 
 switch p.token {
@@ -590,7 +590,7 @@ func (p *_parser) parsePostfixExpression() ast.Expression {
 return operand
 }
 
-func (p *_parser) parseUnaryExpression() ast.Expression {
+func (p *parser) parseUnaryExpression() ast.Expression {
 switch p.token {
 case token.PLUS, token.MINUS, token.NOT, token.BITWISE_NOT:
 fallthrough
@@ -632,7 +632,7 @@ func (p *_parser) parseUnaryExpression() ast.Expression {
 return p.parsePostfixExpression()
 }
 
-func (p *_parser) parseMultiplicativeExpression() ast.Expression {
+func (p *parser) parseMultiplicativeExpression() ast.Expression {
 next := p.parseUnaryExpression
 left := next()
 
@@ -654,7 +654,7 @@ func (p *_parser) parseMultiplicativeExpression() ast.Expression {
 return left
 }
 
-func (p *_parser) parseAdditiveExpression() ast.Expression {
+func (p *parser) parseAdditiveExpression() ast.Expression {
 next := p.parseMultiplicativeExpression
 left := next()
 
@@ -675,7 +675,7 @@ func (p *_parser) parseAdditiveExpression() ast.Expression {
 return left
 }
 
-func (p *_parser) parseShiftExpression() ast.Expression {
+func (p *parser) parseShiftExpression() ast.Expression {
 next := p.parseAdditiveExpression
 left := next()
 
@@ -697,7 +697,7 @@ func (p *_parser) parseShiftExpression() ast.Expression {
 return left
 }
 
-func (p *_parser) parseRelationalExpression() ast.Expression {
+func (p *parser) parseRelationalExpression() ast.Expression {
 next := p.parseShiftExpression
 left := next()
 
@@ -756,7 +756,7 @@ func (p *_parser) parseRelationalExpression() ast.Expression {
 return left
 }
 
-func (p *_parser) parseEqualityExpression() ast.Expression {
+func (p *parser) parseEqualityExpression() ast.Expression {
 next := p.parseRelationalExpression
 left := next()
 
@@ -779,7 +779,7 @@ func (p *_parser) parseEqualityExpression() ast.Expression {
 return left
 }
 
-func (p *_parser) parseBitwiseAndExpression() ast.Expression {
+func (p *parser) parseBitwiseAndExpression() ast.Expression {
 next := p.parseEqualityExpression
 left := next()
 
@@ -800,7 +800,7 @@ func (p *_parser) parseBitwiseAndExpression() ast.Expression {
 return left
 }
 
-func (p *_parser) parseBitwiseExclusiveOrExpression() ast.Expression {
+func (p *parser) parseBitwiseExclusiveOrExpression() ast.Expression {
 next := p.parseBitwiseAndExpression
 left := next()
 
@@ -821,7 +821,7 @@ func (p *_parser) parseBitwiseExclusiveOrExpression() ast.Expression {
 return left
 }
 
-func (p *_parser) parseBitwiseOrExpression() ast.Expression {
+func (p *parser) parseBitwiseOrExpression() ast.Expression {
 next := p.parseBitwiseExclusiveOrExpression
 left := next()
 
@@ -842,7 +842,7 @@ func (p *_parser) parseBitwiseOrExpression() ast.Expression {
 return left
 }
 
-func (p *_parser) parseLogicalAndExpression() ast.Expression {
+func (p *parser) parseLogicalAndExpression() ast.Expression {
 next := p.parseBitwiseOrExpression
 left := next()
 
@@ -863,7 +863,7 @@ func (p *_parser) parseLogicalAndExpression() ast.Expression {
 return left
 }
 
-func (p *_parser) parseLogicalOrExpression() ast.Expression {
+func (p *parser) parseLogicalOrExpression() ast.Expression {
 next := p.parseLogicalAndExpression
 left := next()
 
@@ -884,7 +884,7 @@ func (p *_parser) parseLogicalOrExpression() ast.Expression {
 return left
 }
 
-func (p *_parser) parseConditionlExpression() ast.Expression {
+func (p *parser) parseConditionlExpression() ast.Expression {
 left := p.parseLogicalOrExpression()
 
 if p.token == token.QUESTION_MARK {
@@ -910,7 +910,7 @@ func (p *_parser) parseConditionlExpression() ast.Expression {
 return left
 }
 
-func (p *_parser) parseAssignmentExpression() ast.Expression {
+func (p *parser) parseAssignmentExpression() ast.Expression {
 left := p.parseConditionlExpression()
 var operator token.Token
 switch p.token {
@@ -972,7 +972,7 @@ func (p *_parser) parseAssignmentExpression() ast.Expression {
 return left
 }
 
-func (p *_parser) parseExpression() ast.Expression {
+func (p *parser) parseExpression() ast.Expression {
 next := p.parseAssignmentExpression
 left := next()
 

@@ -55,7 +55,7 @@ func isIdentifierPart(chr rune) bool {
 chr >= utf8.RuneSelf && (unicode.IsLetter(chr) || unicode.IsDigit(chr))
 }
 
-func (p *_parser) scanIdentifier() (string, error) {
+func (p *parser) scanIdentifier() (string, error) {
 offset := p.chrOffset
 parse := false
 for isIdentifierPart(p.chr) {
@@ -119,7 +119,7 @@ func isLineTerminator(chr rune) bool {
 return false
 }
 
-func (p *_parser) scan() (tkn token.Token, literal string, idx file.Idx) { //nolint: nonamedreturns
+func (p *parser) scan() (tkn token.Token, literal string, idx file.Idx) { //nolint: nonamedreturns
 p.implicitSemicolon = false
 
 for {
@@ -310,7 +310,7 @@ func (p *_parser) scan() (tkn token.Token, literal string, idx file.Idx) { //nol
 }
 }
 
-func (p *_parser) switch2(tkn0, tkn1 token.Token) token.Token {
+func (p *parser) switch2(tkn0, tkn1 token.Token) token.Token {
 if p.chr == '=' {
 p.read()
 return tkn1
@@ -318,7 +318,7 @@ func (p *_parser) switch2(tkn0, tkn1 token.Token) token.Token {
 return tkn0
 }
 
-func (p *_parser) switch3(tkn0, tkn1 token.Token, chr2 rune, tkn2 token.Token) token.Token {
+func (p *parser) switch3(tkn0, tkn1 token.Token, chr2 rune, tkn2 token.Token) token.Token {
 if p.chr == '=' {
 p.read()
 return tkn1
@@ -330,7 +330,7 @@ func (p *_parser) switch3(tkn0, tkn1 token.Token, chr2 rune, tkn2 token.Token) t
 return tkn0
 }
 
-func (p *_parser) switch4(tkn0, tkn1 token.Token, chr2 rune, tkn2, tkn3 token.Token) token.Token {
+func (p *parser) switch4(tkn0, tkn1 token.Token, chr2 rune, tkn2, tkn3 token.Token) token.Token {
 if p.chr == '=' {
 p.read()
 return tkn1
@@ -346,7 +346,7 @@ func (p *_parser) switch4(tkn0, tkn1 token.Token, chr2 rune, tkn2, tkn3 token.To
 return tkn0
 }
 
-func (p *_parser) switch6(tkn0, tkn1 token.Token, chr2 rune, tkn2, tkn3 token.Token, chr3 rune, tkn4, tkn5 token.Token) token.Token {
+func (p *parser) switch6(tkn0, tkn1 token.Token, chr2 rune, tkn2, tkn3 token.Token, chr3 rune, tkn4, tkn5 token.Token) token.Token {
 if p.chr == '=' {
 p.read()
 return tkn1
@@ -370,7 +370,7 @@ func (p *_parser) switch6(tkn0, tkn1 token.Token, chr2 rune, tkn2, tkn3 token.To
 return tkn0
 }
 
-func (p *_parser) chrAt(index int) chr { //nolint: unused
+func (p *parser) chrAt(index int) chr { //nolint: unused
 value, width := utf8.DecodeRuneInString(p.str[index:])
 return chr{
 value: value,
@@ -378,14 +378,14 @@ func (p *_parser) chrAt(index int) chr { //nolint: unused
 }
 }
 
-func (p *_parser) peek() rune {
+func (p *parser) peek() rune {
 if p.offset+1 < p.length {
 return rune(p.str[p.offset+1])
 }
 return -1
 }
 
-func (p *_parser) read() {
+func (p *parser) read() {
 if p.offset < p.length {
 p.chrOffset = p.offset
 chr, width := rune(p.str[p.offset]), 1
@@ -422,7 +422,7 @@ func (p *regExpParser) read() {
 }
 }
 
-func (p *_parser) readSingleLineComment() []rune {
+func (p *parser) readSingleLineComment() []rune {
 var result []rune
 for p.chr != -1 {
 p.read()
@@ -436,7 +436,7 @@ func (p *_parser) readSingleLineComment() []rune {
 return result[:len(result)-1]
 }
 
-func (p *_parser) readMultiLineComment() []rune {
+func (p *parser) readMultiLineComment() []rune {
 var result []rune
 p.read()
 for p.chr >= 0 {
@@ -455,7 +455,7 @@ func (p *_parser) readMultiLineComment() []rune {
 return result
 }
 
-func (p *_parser) skipSingleLineComment() {
+func (p *parser) skipSingleLineComment() {
 for p.chr != -1 {
 p.read()
 if isLineTerminator(p.chr) {
@@ -464,7 +464,7 @@ func (p *_parser) skipSingleLineComment() {
 }
 }
 
-func (p *_parser) skipMultiLineComment() {
+func (p *parser) skipMultiLineComment() {
 p.read()
 for p.chr >= 0 {
 chr := p.chr
@@ -478,7 +478,7 @@ func (p *_parser) skipMultiLineComment() {
 p.errorUnexpected(0, p.chr)
 }
 
-func (p *_parser) skipWhiteSpace() {
+func (p *parser) skipWhiteSpace() {
 for {
 switch p.chr {
 case ' ', '\t', '\f', '\v', '\u00a0', '\ufeff':
@@ -508,13 +508,13 @@ func (p *_parser) skipWhiteSpace() {
 }
 }
 
-func (p *_parser) scanMantissa(base int) {
+func (p *parser) scanMantissa(base int) {
 for digitValue(p.chr) < base {
 p.read()
 }
 }
 
-func (p *_parser) scanEscape(quote rune) {
+func (p *parser) scanEscape(quote rune) {
 var length, base uint32
 switch p.chr {
 // Octal:
@@ -547,7 +547,7 @@ func (p *_parser) scanEscape(quote rune) {
 }
 }
 
-func (p *_parser) scanString(offset int) (string, error) {
+func (p *parser) scanString(offset int) (string, error) {
 // " ' /
 quote := rune(p.str[offset])
 
@@ -591,7 +591,7 @@ newline:
 return "", errors.New(err)
 }
 
-func (p *_parser) scanNewline() {
+func (p *parser) scanNewline() {
 if p.chr == '\r' {
 p.read()
 if p.chr != '\n' {
@@ -780,7 +780,7 @@ func parseStringLiteral(literal string) (string, error) {
 return buffer.String(), nil
 }
 
-func (p *_parser) scanNumericLiteral(decimalPoint bool) (token.Token, string) {
+func (p *parser) scanNumericLiteral(decimalPoint bool) (token.Token, string) {
 offset := p.chrOffset
 tkn := token.NUMBER
 

@@ -15,7 +15,7 @@ var (
 
 func TestLexer(t *testing.T) {
 tt(t, func() {
-setup := func(src string) *_parser {
+setup := func(src string) *parser {
 parser := newParser("", src, 1, nil)
 return parser
 }

@@ -56,7 +56,7 @@ const (
 StoreComments
 )
 
-type _parser struct { //nolint: maligned
+type parser struct { //nolint: maligned
 str string
 length int
 base int
@@ -93,8 +93,8 @@ type Parser interface {
 Scan() (tkn token.Token, literal string, idx file.Idx)
 }
 
-func newParser(filename, src string, base int, sm *sourcemap.Consumer) *_parser {
-return &_parser{
+func newParser(filename, src string, base int, sm *sourcemap.Consumer) *parser {
+return &parser{
 chr: ' ', // This is set so we can start scanning by skipping whitespace
 str: src,
 length: len(src),
@@ -190,10 +190,10 @@ func ParseFileWithSourceMap(fileSet *file.FileSet, filename string, javascriptSo
 base = fileSet.AddFile(filename, string(src))
 }
 
-parser := newParser(filename, string(src), base, sm)
-parser.mode = mode
-program, err := parser.parse()
-program.Comments = parser.comments.CommentMap
+p := newParser(filename, string(src), base, sm)
+p.mode = mode
+program, err := p.parse()
+program.Comments = p.comments.CommentMap
 
 return program, err
 }
@@ -221,8 +221,8 @@ func ParseFile(fileSet *file.FileSet, filename string, src interface{}, mode Mod
 func ParseFunction(parameterList, body string) (*ast.FunctionLiteral, error) {
 src := "(function(" + parameterList + ") {\n" + body + "\n})"
 
-parser := newParser("", src, 1, nil)
-program, err := parser.parse()
+p := newParser("", src, 1, nil)
+program, err := p.parse()
 if err != nil {
 return nil, err
 }
@@ -233,11 +233,11 @@ func ParseFunction(parameterList, body string) (*ast.FunctionLiteral, error) {
 // Scan reads a single token from the source at the current offset, increments the offset and
 // returns the token.Token token, a string literal representing the value of the token (if applicable)
 // and it's current file.Idx index.
-func (p *_parser) Scan() (token.Token, string, file.Idx) {
+func (p *parser) Scan() (token.Token, string, file.Idx) {
 return p.scan()
 }
 
-func (p *_parser) slice(idx0, idx1 file.Idx) string {
+func (p *parser) slice(idx0, idx1 file.Idx) string {
 from := int(idx0) - p.base
 to := int(idx1) - p.base
 if from >= 0 && to <= len(p.str) {
@@ -247,7 +247,7 @@ func (p *_parser) slice(idx0, idx1 file.Idx) string {
 return ""
 }
 
-func (p *_parser) parse() (*ast.Program, error) {
+func (p *parser) parse() (*ast.Program, error) {
 p.next()
 program := p.parseProgram()
 if false {
@@ -261,11 +261,11 @@ func (p *_parser) parse() (*ast.Program, error) {
 return program, p.errors.Err()
 }
 
-func (p *_parser) next() {
+func (p *parser) next() {
 p.token, p.literal, p.idx = p.scan()
 }
 
-func (p *_parser) optionalSemicolon() {
+func (p *parser) optionalSemicolon() {
 if p.token == token.SEMICOLON {
 p.next()
 return
@@ -281,7 +281,7 @@ func (p *_parser) optionalSemicolon() {
 }
 }
 
-func (p *_parser) semicolon() {
+func (p *parser) semicolon() {
 if p.token != token.RIGHT_PARENTHESIS && p.token != token.RIGHT_BRACE {
 if p.implicitSemicolon {
 p.implicitSemicolon = false
@@ -292,11 +292,11 @@ func (p *_parser) semicolon() {
 }
 }
 
-func (p *_parser) idxOf(offset int) file.Idx {
+func (p *parser) idxOf(offset int) file.Idx {
 return file.Idx(p.base + offset)
 }
 
-func (p *_parser) expect(value token.Token) file.Idx {
+func (p *parser) expect(value token.Token) file.Idx {
 idx := p.idx
 if p.token != value {
 p.errorUnexpectedToken(p.token)
@@ -329,7 +329,7 @@ func lineCount(str string) (int, int) {
 return line, last
 }
 
-func (p *_parser) position(idx file.Idx) file.Position {
+func (p *parser) position(idx file.Idx) file.Position {
 position := file.Position{}
 offset := int(idx) - p.base
 str := p.str[:offset]

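As an aside, none of the exported entry points touched above (ParseFile, ParseFileWithSourceMap, ParseFunction, the Parser interface) change shape; only the unexported receiver type and a few local variables (parser -> p, presumably so the locals no longer shadow the new type name) are renamed. A minimal caller sketch against the exported API, assuming a nil *file.FileSet and mode 0 as the simplest invocation:

package main

import (
	"fmt"

	"github.com/robertkrimen/otto/parser"
)

func main() {
	// ParseFile is the exported wrapper whose body now builds a *parser
	// instead of a *_parser; callers are unaffected by the rename.
	program, err := parser.ParseFile(nil, "example.js", "var answer = 6 * 7;", 0)
	if err != nil {
		panic(err)
	}
	fmt.Printf("parsed %d top-level statement(s)\n", len(program.Body))
}
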
@@ -22,11 +22,11 @@ func firstErr(err error) error {
 
 var matchBeforeAfterSeparator = regexp.MustCompile(`(?m)^[ \t]*---$`)
 
-func testParse(src string) (*_parser, *ast.Program, error) {
+func testParse(src string) (*parser, *ast.Program, error) {
 return testParseWithMode(src, 0)
 }
 
-func testParseWithMode(src string, mode Mode) (parser *_parser, program *ast.Program, err error) { //nolint: nonamedreturns
+func testParseWithMode(src string, mode Mode) (parser *parser, program *ast.Program, err error) { //nolint: nonamedreturns
 defer func() {
 if tmp := recover(); tmp != nil {
 if tmp, ok := tmp.(string); ok {
@@ -91,7 +91,7 @@ func TestParseFunction(t *testing.T) {
 
 func TestParserErr(t *testing.T) {
 tt(t, func() {
-test := func(input string, expect interface{}) (*ast.Program, *_parser) {
+test := func(input string, expect interface{}) (*ast.Program, *parser) {
 parser := newParser("", input, 1, nil)
 program, err := parser.parse()
 is(firstErr(err), expect)

@@ -40,23 +40,23 @@ func TransformRegExp(pattern string) (string, error) {
 
 // TODO If without \, if without (?=, (?!, then another shortcut
 
-parser := regExpParser{
+p := regExpParser{
 str: pattern,
 length: len(pattern),
 goRegexp: bytes.NewBuffer(make([]byte, 0, 3*len(pattern)/2)),
 }
-parser.read() // Pull in the first character
-parser.scan()
+p.read() // Pull in the first character
+p.scan()
 var err error
-if len(parser.errors) > 0 {
-err = parser.errors[0]
+if len(p.errors) > 0 {
+err = p.errors[0]
 }
-if parser.invalid {
+if p.invalid {
 return "", err
 }
 
 // Might not be re2 compatible, but is still a valid JavaScript RegExp
-return parser.goRegexp.String(), err
+return p.goRegexp.String(), err
 }
 
 func (p *regExpParser) scan() {

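In the hunk above the local variable parser also becomes p, presumably to keep it from shadowing the renamed parser type. For reference, a minimal sketch of calling this exported helper; the sample pattern and expected output are illustrative only:

package main

import (
	"fmt"
	"regexp"

	"github.com/robertkrimen/otto/parser"
)

func main() {
	// TransformRegExp rewrites a JavaScript regular expression into a
	// pattern Go's regexp (re2) package can compile; err reports
	// constructs the transformation could not handle.
	goPattern, err := parser.TransformRegExp("^[0-9]+$")
	if err != nil {
		fmt.Println("not convertible:", err)
		return
	}
	re := regexp.MustCompile(goPattern)
	fmt.Println(re.MatchString("12345")) // true
}
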
@@ -15,14 +15,14 @@ type scope struct {
 labels []string
 }
 
-func (p *_parser) openScope() {
+func (p *parser) openScope() {
 p.scope = &scope{
 outer: p.scope,
 allowIn: true,
 }
 }
 
-func (p *_parser) closeScope() {
+func (p *parser) closeScope() {
 p.scope = p.scope.outer
 }
 

@@ -5,7 +5,7 @@ import (
 "github.com/robertkrimen/otto/token"
 )
 
-func (p *_parser) parseBlockStatement() *ast.BlockStatement {
+func (p *parser) parseBlockStatement() *ast.BlockStatement {
 node := &ast.BlockStatement{}
 
 // Find comments before the leading brace
@@ -34,12 +34,12 @@ func (p *_parser) parseBlockStatement() *ast.BlockStatement {
 return node
 }
 
-func (p *_parser) parseEmptyStatement() ast.Statement {
+func (p *parser) parseEmptyStatement() ast.Statement {
 idx := p.expect(token.SEMICOLON)
 return &ast.EmptyStatement{Semicolon: idx}
 }
 
-func (p *_parser) parseStatementList() (list []ast.Statement) { //nolint: nonamedreturns
+func (p *parser) parseStatementList() (list []ast.Statement) { //nolint: nonamedreturns
 for p.token != token.RIGHT_BRACE && p.token != token.EOF {
 statement := p.parseStatement()
 list = append(list, statement)
@@ -48,7 +48,7 @@ func (p *_parser) parseStatementList() (list []ast.Statement) { //nolint: noname
 return list
 }
 
-func (p *_parser) parseStatement() ast.Statement {
+func (p *parser) parseStatement() ast.Statement {
 if p.token == token.EOF {
 p.errorUnexpectedToken(p.token)
 return &ast.BadStatement{From: p.idx, To: p.idx + 1}
@@ -147,7 +147,7 @@ func (p *_parser) parseStatement() ast.Statement {
 return statement
 }
 
-func (p *_parser) parseTryStatement() ast.Statement {
+func (p *parser) parseTryStatement() ast.Statement {
 var tryComments []*ast.Comment
 if p.mode&StoreComments != 0 {
 tryComments = p.comments.FetchAll()
@@ -211,7 +211,7 @@ func (p *_parser) parseTryStatement() ast.Statement {
 return node
 }
 
-func (p *_parser) parseFunctionParameterList() *ast.ParameterList {
+func (p *parser) parseFunctionParameterList() *ast.ParameterList {
 opening := p.expect(token.LEFT_PARENTHESIS)
 if p.mode&StoreComments != 0 {
 p.comments.Unset()
@@ -240,7 +240,7 @@ func (p *_parser) parseFunctionParameterList() *ast.ParameterList {
 }
 }
 
-func (p *_parser) parseFunctionStatement() *ast.FunctionStatement {
+func (p *parser) parseFunctionStatement() *ast.FunctionStatement {
 var comments []*ast.Comment
 if p.mode&StoreComments != 0 {
 comments = p.comments.FetchAll()
@@ -255,7 +255,7 @@ func (p *_parser) parseFunctionStatement() *ast.FunctionStatement {
 return function
 }
 
-func (p *_parser) parseFunction(declaration bool) *ast.FunctionLiteral {
+func (p *parser) parseFunction(declaration bool) *ast.FunctionLiteral {
 node := &ast.FunctionLiteral{
 Function: p.expect(token.FUNCTION),
 }
@@ -283,7 +283,7 @@ func (p *_parser) parseFunction(declaration bool) *ast.FunctionLiteral {
 return node
 }
 
-func (p *_parser) parseFunctionBlock(node *ast.FunctionLiteral) {
+func (p *parser) parseFunctionBlock(node *ast.FunctionLiteral) {
 p.openScope()
 inFunction := p.scope.inFunction
 p.scope.inFunction = true
@@ -295,7 +295,7 @@ func (p *_parser) parseFunctionBlock(node *ast.FunctionLiteral) {
 node.DeclarationList = p.scope.declarationList
 }
 
-func (p *_parser) parseDebuggerStatement() ast.Statement {
+func (p *parser) parseDebuggerStatement() ast.Statement {
 idx := p.expect(token.DEBUGGER)
 
 node := &ast.DebuggerStatement{
@@ -309,7 +309,7 @@ func (p *_parser) parseDebuggerStatement() ast.Statement {
 return node
 }
 
-func (p *_parser) parseReturnStatement() ast.Statement {
+func (p *parser) parseReturnStatement() ast.Statement {
 idx := p.expect(token.RETURN)
 var comments []*ast.Comment
 if p.mode&StoreComments != 0 {
@@ -338,7 +338,7 @@ func (p *_parser) parseReturnStatement() ast.Statement {
 return node
 }
 
-func (p *_parser) parseThrowStatement() ast.Statement {
+func (p *parser) parseThrowStatement() ast.Statement {
 var comments []*ast.Comment
 if p.mode&StoreComments != 0 {
 comments = p.comments.FetchAll()
@@ -368,7 +368,7 @@ func (p *_parser) parseThrowStatement() ast.Statement {
 return node
 }
 
-func (p *_parser) parseSwitchStatement() ast.Statement {
+func (p *parser) parseSwitchStatement() ast.Statement {
 var comments []*ast.Comment
 if p.mode&StoreComments != 0 {
 comments = p.comments.FetchAll()
@@ -418,7 +418,7 @@ func (p *_parser) parseSwitchStatement() ast.Statement {
 return node
 }
 
-func (p *_parser) parseWithStatement() ast.Statement {
+func (p *parser) parseWithStatement() ast.Statement {
 var comments []*ast.Comment
 if p.mode&StoreComments != 0 {
 comments = p.comments.FetchAll()
@@ -446,7 +446,7 @@ func (p *_parser) parseWithStatement() ast.Statement {
 return node
 }
 
-func (p *_parser) parseCaseStatement() *ast.CaseStatement {
+func (p *parser) parseCaseStatement() *ast.CaseStatement {
 node := &ast.CaseStatement{
 Case: p.idx,
 }
@@ -488,7 +488,7 @@ func (p *_parser) parseCaseStatement() *ast.CaseStatement {
 return node
 }
 
-func (p *_parser) parseIterationStatement() ast.Statement {
+func (p *parser) parseIterationStatement() ast.Statement {
 inIteration := p.scope.inIteration
 p.scope.inIteration = true
 defer func() {
@@ -497,7 +497,7 @@ func (p *_parser) parseIterationStatement() ast.Statement {
 return p.parseStatement()
 }
 
-func (p *_parser) parseForIn(into ast.Expression) *ast.ForInStatement {
+func (p *parser) parseForIn(into ast.Expression) *ast.ForInStatement {
 // Already have consumed "<into> in"
 
 source := p.parseExpression()
@@ -513,7 +513,7 @@ func (p *_parser) parseForIn(into ast.Expression) *ast.ForInStatement {
 return forin
 }
 
-func (p *_parser) parseFor(initializer ast.Expression) *ast.ForStatement {
+func (p *parser) parseFor(initializer ast.Expression) *ast.ForStatement {
 // Already have consumed "<initializer> ;"
 
 var test, update ast.Expression
@@ -542,7 +542,7 @@ func (p *_parser) parseFor(initializer ast.Expression) *ast.ForStatement {
 return forstatement
 }
 
-func (p *_parser) parseForOrForInStatement() ast.Statement {
+func (p *parser) parseForOrForInStatement() ast.Statement {
 var comments []*ast.Comment
 if p.mode&StoreComments != 0 {
 comments = p.comments.FetchAll()
@@ -623,7 +623,7 @@ func (p *_parser) parseForOrForInStatement() ast.Statement {
 return forstatement
 }
 
-func (p *_parser) parseVariableStatement() *ast.VariableStatement {
+func (p *parser) parseVariableStatement() *ast.VariableStatement {
 var comments []*ast.Comment
 if p.mode&StoreComments != 0 {
 comments = p.comments.FetchAll()
@@ -645,7 +645,7 @@ func (p *_parser) parseVariableStatement() *ast.VariableStatement {
 return statement
 }
 
-func (p *_parser) parseDoWhileStatement() ast.Statement {
+func (p *parser) parseDoWhileStatement() ast.Statement {
 inIteration := p.scope.inIteration
 p.scope.inIteration = true
 defer func() {
@@ -690,7 +690,7 @@ func (p *_parser) parseDoWhileStatement() ast.Statement {
 return node
 }
 
-func (p *_parser) parseWhileStatement() ast.Statement {
+func (p *parser) parseWhileStatement() ast.Statement {
 var comments []*ast.Comment
 if p.mode&StoreComments != 0 {
 comments = p.comments.FetchAll()
@@ -717,7 +717,7 @@ func (p *_parser) parseWhileStatement() ast.Statement {
 return node
 }
 
-func (p *_parser) parseIfStatement() ast.Statement {
+func (p *parser) parseIfStatement() ast.Statement {
 var comments []*ast.Comment
 if p.mode&StoreComments != 0 {
 comments = p.comments.FetchAll()
@@ -753,12 +753,12 @@ func (p *_parser) parseIfStatement() ast.Statement {
 return node
 }
 
-func (p *_parser) parseSourceElement() ast.Statement {
+func (p *parser) parseSourceElement() ast.Statement {
 statement := p.parseStatement()
 return statement
 }
 
-func (p *_parser) parseSourceElements() []ast.Statement {
+func (p *parser) parseSourceElements() []ast.Statement {
 body := []ast.Statement(nil)
 
 for {
@@ -775,7 +775,7 @@ func (p *_parser) parseSourceElements() []ast.Statement {
 return body
 }
 
-func (p *_parser) parseProgram() *ast.Program {
+func (p *parser) parseProgram() *ast.Program {
 p.openScope()
 defer p.closeScope()
 return &ast.Program{
@@ -785,7 +785,7 @@ func (p *_parser) parseProgram() *ast.Program {
 }
 }
 
-func (p *_parser) parseBreakStatement() ast.Statement {
+func (p *parser) parseBreakStatement() ast.Statement {
 var comments []*ast.Comment
 if p.mode&StoreComments != 0 {
 comments = p.comments.FetchAll()
@@ -842,7 +842,7 @@ illegal:
 return &ast.BadStatement{From: idx, To: p.idx}
 }
 
-func (p *_parser) parseContinueStatement() ast.Statement {
+func (p *parser) parseContinueStatement() ast.Statement {
 idx := p.expect(token.CONTINUE)
 semicolon := p.implicitSemicolon
 if p.token == token.SEMICOLON {
@@ -887,7 +887,7 @@ illegal:
 }
 
 // Find the next statement after an error (recover).
-func (p *_parser) nextStatement() {
+func (p *parser) nextStatement() {
 for {
 switch p.token {
 case token.BREAK, token.CONTINUE,