
Commit

Merge remote-tracking branch 'origin/main'
estevanbs committed Sep 22, 2023
2 parents 60d7285 + 6cb465a commit 58c87d3
Showing 3 changed files with 280 additions and 0 deletions.
54 changes: 54 additions & 0 deletions ast/ast.go
@@ -0,0 +1,54 @@
package ast

import "monkey/token"

type Node interface {
    TokenLiteral() string
}

type Statement interface {
    Node
    statementNode()
}

type Expression interface {
    Node
    expressionNode()
}

type Program struct {
    Statements []Statement
}

func (p *Program) TokenLiteral() string {
    if len(p.Statements) > 0 {
        return p.Statements[0].TokenLiteral()
    } else {
        return ""
    }
}

type LetStatement struct {
    Token token.Token // the token.LET token
    Name  *Identifier
    Value Expression
}

func (ls *LetStatement) statementNode()       {}
func (ls *LetStatement) TokenLiteral() string { return ls.Token.Literal }

type ReturnStatement struct {
    Token       token.Token // the 'return' token
    ReturnValue Expression
}

func (rs *ReturnStatement) statementNode()       {}
func (rs *ReturnStatement) TokenLiteral() string { return rs.Token.Literal }

type Identifier struct {
    Token token.Token // the token.IDENT token
    Value string
}

func (i *Identifier) expressionNode()      {}
func (i *Identifier) TokenLiteral() string { return i.Token.Literal }
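
As a minimal sketch of how these node types compose (not part of the commit), the statement "let myVar = anotherVar;" can be built by hand roughly as follows. It assumes the repository's token package exposes a Token struct with Type and Literal fields plus the LET and IDENT constants, which is how the code above uses it.

package main

import (
    "fmt"

    "monkey/ast"
    "monkey/token"
)

func main() {
    // Hand-built equivalent of `let myVar = anotherVar;` using only the
    // node types defined in ast/ast.go above.
    program := &ast.Program{
        Statements: []ast.Statement{
            &ast.LetStatement{
                Token: token.Token{Type: token.LET, Literal: "let"},
                Name: &ast.Identifier{
                    Token: token.Token{Type: token.IDENT, Literal: "myVar"},
                    Value: "myVar",
                },
                Value: &ast.Identifier{
                    Token: token.Token{Type: token.IDENT, Literal: "anotherVar"},
                    Value: "anotherVar",
                },
            },
        },
    }

    // Program.TokenLiteral delegates to the first statement, so this prints "let".
    fmt.Println(program.TokenLiteral())
}
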
118 changes: 118 additions & 0 deletions parser/parser.go
@@ -0,0 +1,118 @@
package parser

import (
    "fmt"
    "monkey/ast"
    "monkey/lexer"
    "monkey/token"
)

type Parser struct {
    l *lexer.Lexer

    errors []string

    curToken  token.Token
    peekToken token.Token
}

func New(l *lexer.Lexer) *Parser {
    p := &Parser{
        l:      l,
        errors: []string{},
    }

    // Read two tokens, so curToken and peekToken are both set.
    p.nextToken()
    p.nextToken()

    return p
}

func (p *Parser) Errors() []string {
    return p.errors
}

func (p *Parser) peekError(t token.TokenType) {
    msg := fmt.Sprintf("expected next token to be %s, got %s instead",
        t, p.peekToken.Type)
    p.errors = append(p.errors, msg)
}

func (p *Parser) nextToken() {
    p.curToken = p.peekToken
    p.peekToken = p.l.NextToken()
}

func (p *Parser) ParseProgram() *ast.Program {
    program := &ast.Program{}
    program.Statements = []ast.Statement{}

    for !p.curTokenIs(token.EOF) {
        stmt := p.parseStatement()
        if stmt != nil {
            program.Statements = append(program.Statements, stmt)
        }
        p.nextToken()
    }

    return program
}

func (p *Parser) parseStatement() ast.Statement {
    switch p.curToken.Type {
    case token.LET:
        return p.parseLetStatement()
    case token.RETURN:
        return p.parseReturnStatement()
    default:
        return nil
    }
}

func (p *Parser) parseLetStatement() *ast.LetStatement {
    stmt := &ast.LetStatement{Token: p.curToken}

    if !p.expectPeek(token.IDENT) {
        return nil
    }

    stmt.Name = &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}

    if !p.expectPeek(token.ASSIGN) {
        return nil
    }

    // TODO: We're skipping the expression until we encounter a semicolon.
    for !p.curTokenIs(token.SEMICOLON) {
        p.nextToken()
    }

    return stmt
}

func (p *Parser) parseReturnStatement() *ast.ReturnStatement {
    stmt := &ast.ReturnStatement{Token: p.curToken}

    p.nextToken()

    // TODO: We're skipping the expression until we encounter a semicolon.
    for !p.curTokenIs(token.SEMICOLON) {
        p.nextToken()
    }

    return stmt
}

func (p *Parser) curTokenIs(t token.TokenType) bool {
    return p.curToken.Type == t
}

func (p *Parser) peekTokenIs(t token.TokenType) bool {
    return p.peekToken.Type == t
}

func (p *Parser) expectPeek(t token.TokenType) bool {
    if p.peekTokenIs(t) {
        p.nextToken()
        return true
    } else {
        p.peekError(t)
        return false
    }
}
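
A minimal sketch of driving this parser outside the test suite, assuming lexer.New(input) returns a *lexer.Lexer as the tests below use it (the example input is illustrative):

package main

import (
    "fmt"

    "monkey/lexer"
    "monkey/parser"
)

func main() {
    input := "let x = 5; return x;"

    // Wire the lexer to the parser exactly as the tests below do.
    l := lexer.New(input)
    p := parser.New(l)

    program := p.ParseProgram()
    if errs := p.Errors(); len(errs) != 0 {
        for _, msg := range errs {
            fmt.Println("parser error:", msg)
        }
        return
    }

    // One let statement and one return statement: prints "parsed 2 statements".
    fmt.Printf("parsed %d statements\n", len(program.Statements))
}
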
108 changes: 108 additions & 0 deletions parser/parser_test.go
@@ -0,0 +1,108 @@
package parser

import (
    "monkey/ast"
    "monkey/lexer"
    "testing"
)

func TestLetStatements(t *testing.T) {
    input := `
let x = 5;
let y = 10;
let foobar = 838383;
`

    l := lexer.New(input)
    p := New(l)

    program := p.ParseProgram()
    checkParserErrors(t, p)

    if program == nil {
        t.Fatalf("ParseProgram() returned nil")
    }
    if len(program.Statements) != 3 {
        t.Fatalf("program.Statements does not contain 3 statements. got=%d",
            len(program.Statements))
    }

    tests := []struct {
        expectedIdentifier string
    }{
        {"x"},
        {"y"},
        {"foobar"},
    }

    for i, tt := range tests {
        stmt := program.Statements[i]
        if !testLetStatement(t, stmt, tt.expectedIdentifier) {
            return
        }
    }
}

func testLetStatement(t *testing.T, s ast.Statement, name string) bool {
    if s.TokenLiteral() != "let" {
        t.Errorf("s.TokenLiteral not 'let'. got=%q", s.TokenLiteral())
        return false
    }

    letStmt, ok := s.(*ast.LetStatement)
    if !ok {
        t.Errorf("s not *ast.LetStatement. got=%T", s)
        return false
    }

    if letStmt.Name.Value != name {
        t.Errorf("letStmt.Name.Value not '%s'. got=%s", name, letStmt.Name.Value)
        return false
    }

    if letStmt.Name.TokenLiteral() != name {
        t.Errorf("letStmt.Name.TokenLiteral() not '%s'. got=%s",
            name, letStmt.Name.TokenLiteral())
        return false
    }

    return true
}

func TestReturnStatements(t *testing.T) {
    input := `
return 5;
return 10;
return 993322;
`

    l := lexer.New(input)
    p := New(l)

    program := p.ParseProgram()
    checkParserErrors(t, p)

    if len(program.Statements) != 3 {
        t.Fatalf("program.Statements does not contain 3 statements. got=%d",
            len(program.Statements))
    }

    for _, stmt := range program.Statements {
        returnStmt, ok := stmt.(*ast.ReturnStatement)
        if !ok {
            t.Errorf("stmt not *ast.ReturnStatement. got=%T", stmt)
            continue
        }
        if returnStmt.TokenLiteral() != "return" {
            t.Errorf("returnStmt.TokenLiteral not 'return', got %q",
                returnStmt.TokenLiteral())
        }
    }
}

func checkParserErrors(t *testing.T, p *Parser) {
    errors := p.Errors()
    if len(errors) == 0 {
        return
    }

    t.Errorf("parser has %d errors", len(errors))
    for _, msg := range errors {
        t.Errorf("parser error: %q", msg)
    }
    t.FailNow()
}
