Skip to content

Commit

Permalink
FIX: error caused by mistake in checking for EOF token;
Browse files Browse the repository at this point in the history
DOCS: some notes on 'how does Pratt parser actually work?', may need to re-read this stuff;
74/208 @ pdf
  • Loading branch information
MKaczkow committed Sep 3, 2024
1 parent 39aca0b commit b555812
Show file tree
Hide file tree
Showing 3 changed files with 32 additions and 17 deletions.
14 changes: 14 additions & 0 deletions monkey/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,20 @@ func (p *Parser) expectPeek(t token.TokenType) bool {
- `call expression` (e.g. `add(1, 2)`)
- ...
* `parseExpression` method is recursive, because expressions can be nested, which is the crux of the whole parser idea (*recursive descent parser*)
* `Top Down Operator Precedence` by V. Pratt [link](https://tdop.github.io/), but with differences:
* `nuds` (null denotation) -> `prefixParseFns`
* `leds` (left denotation) -> `infixParseFns`
* challenge is, to nest the nodes in AST correctly, so that
```math
1 + 2 + 3
```
which *mathematically* groups as
```math
((1 + 2) + 3)
```
is represented as:
![monkey-math-ast](./img/monkey-interpreter-02.png)


### repl
* `REPL` - `Read-Eval-Print Loop`
Expand Down
Binary file added monkey/img/monkey-interpreter-02.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
35 changes: 18 additions & 17 deletions monkey/interpreter/parser/parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,34 +20,33 @@ const (
CALL // myFunction(X)
)

// precedences gives each infix operator token its binding power,
// grouped here by tier: equality, comparison, additive, multiplicative.
var precedences = map[token.TokenType]int{
	// equality operators
	token.EQ:     EQUALS,
	token.NOT_EQ: EQUALS,
	// comparison operators
	token.LT: LESSGREATER,
	token.GT: LESSGREATER,
	// additive operators
	token.PLUS:  SUM,
	token.MINUS: SUM,
	// multiplicative operators
	token.ASTERISK: PRODUCT,
	token.SLASH:    PRODUCT,
}

// Pratt-parser callback types. A prefixParseFn (Pratt's "nud", null
// denotation) parses a token appearing in prefix position; an infixParseFn
// (Pratt's "led", left denotation) receives the already-parsed left-hand
// expression and builds the infix expression around it.
type (
prefixParseFn func() ast.Expression
infixParseFn func(ast.Expression) ast.Expression
)

// Parser consumes tokens from a lexer and builds the program AST.
// It is a Pratt parser: prefixParseFns handle tokens in prefix position,
// infixParseFns handle tokens in infix position.
//
// NOTE: the diff rendering had merged the old and new field layouts,
// leaving duplicate `l` and `errors` fields; this is the consolidated
// (post-commit) layout with each field declared exactly once.
type Parser struct {
	l      *lexer.Lexer // token source
	errors []string     // accumulated parse errors, reported after parsing

	// two-token lookahead window
	curToken  token.Token
	peekToken token.Token

	// per-token-type parse callbacks registered in New
	prefixParseFns map[token.TokenType]prefixParseFn
	infixParseFns  map[token.TokenType]infixParseFn
}

// precedences maps each infix operator token to its precedence level
// (EQUALS < LESSGREATER < SUM < PRODUCT), so the parsing loop in
// parseExpression can decide whether the next operator binds tightly
// enough to absorb the expression parsed so far.
var precedences = map[token.TokenType]int{
token.EQ: EQUALS,
token.NOT_EQ: EQUALS,
token.LT: LESSGREATER,
token.GT: LESSGREATER,
token.PLUS: SUM,
token.MINUS: SUM,
token.SLASH: PRODUCT,
token.ASTERISK: PRODUCT,
}

func New(l *lexer.Lexer) *Parser {
p := &Parser{
l: l,
Expand Down Expand Up @@ -87,7 +86,7 @@ func (p *Parser) ParseProgram() *ast.Program {
program := &ast.Program{}
program.Statements = []ast.Statement{}

for p.curToken.Type != token.EOF {
for !p.curTokenIs(token.EOF) {
stmt := p.parseStatement()
if stmt != nil {
program.Statements = append(program.Statements, stmt)
Expand Down Expand Up @@ -168,15 +167,16 @@ func (p *Parser) parseExpressionStatement() *ast.ExpressionStatement {
}

func (p *Parser) parseExpression(precedence int) ast.Expression {
// is there a prefixParseFn associated with the current token type?
prefix := p.prefixParseFns[p.curToken.Type]
if prefix == nil {
p.noPrefixParseFnError(p.curToken.Type)
return nil
}
leftExp := prefix()

if !p.peekTokenIs(token.SEMICOLON) && precedence < p.peekPrecedence() {
// this second condition is really the heart of the recursive descent parser
// this second condition is really the heart of the recursive descent parser
for !p.peekTokenIs(token.SEMICOLON) && precedence < p.peekPrecedence() {
infix := p.infixParseFns[p.peekToken.Type]
if infix == nil {
return leftExp
Expand All @@ -186,6 +186,7 @@ func (p *Parser) parseExpression(precedence int) ast.Expression {

leftExp = infix(leftExp)
}

return leftExp
}

Expand Down

0 comments on commit b555812

Please sign in to comment.