Adding postfix expressions (++ and --) - changed evaluation order to be more human friendly (appears left to right now) (#73)

* Adding postfix expressions (++ and --) - wip

* working kinda in eval but... parsing shows a,a--

* finally working, the trick is to handle it in the identifier and not in the expression

* handle float ++/--

* remove unneeded precedence (for postfix, hacked as an identifier modifier)

* got an excellent suggestion to do natural left-to-right eval, which actually meant swapping left and right. Also removed one level of () when the outer expression is an assignment, so a=1+2 prints as a = 1 + 2 and not a = (1 + 2)

* fixed string-based check that escaped #70; better comments/code too
ldemailly authored Jul 28, 2024
1 parent 6864eab commit 4c948a9
Showing 10 changed files with 149 additions and 42 deletions.
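
For a quick sense of the new behavior, here is a sketch in the style of the eval test cases touched below (the expected values come from the new entries in eval/eval_test.go; the comments are illustrative, not part of the diff):

	// Postfix ++ and -- evaluate to the variable's old value and update it in place.
	{`a=2; b=3; r=a+5*b++`, 17},   // b++ yields 3 for the multiplication, then b becomes 4
	{`a=2; b=3; r=a+5*b++;b`, 4},  // the increment is visible afterwards
	{`a=3.1; a--; a`, 2.1},        // floats are handled too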
26 changes: 24 additions & 2 deletions ast/ast.go
@@ -176,6 +176,23 @@ func (p PrefixExpression) PrettyPrint(out *PrintState) *PrintState {
return out
}

type PostfixExpression struct {
Base
Prev *token.Token
}

func (p PostfixExpression) PrettyPrint(out *PrintState) *PrintState {
if out.ExpressionLevel > 0 {
out.Print("(")
}
out.Print(p.Prev.Literal())
out.Print(p.Literal())
if out.ExpressionLevel > 0 {
out.Print(")")
}
return out
}

type InfixExpression struct {
Base
Left Node
@@ -186,11 +203,16 @@ func (i InfixExpression) PrettyPrint(out *PrintState) *PrintState {
if out.ExpressionLevel > 0 { // TODO only add parens if precedence requires it.
out.Print("(")
}
out.ExpressionLevel++
isAssign := (i.Token.Type() == token.ASSIGN)
if !isAssign {
out.ExpressionLevel++
}
i.Left.PrettyPrint(out)
out.Print(" ", i.Literal(), " ")
i.Right.PrettyPrint(out)
out.ExpressionLevel--
if !isAssign {
out.ExpressionLevel--
}
if out.ExpressionLevel > 0 {
out.Print(")")
}
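Two pretty-printing consequences of the ast.go changes above, sketched from the parser tests further down (output shapes inferred from the code, not separately verified):

	// PostfixExpression: parens only when nested inside another expression.
	//   a--        prints as  a--
	//   1 + a--    prints as  1 + (a--)
	// Assignment no longer bumps the expression level, so the RHS loses one layer of parens:
	//   x = 41 * 6   used to print as  x = (41 * 6)   and now prints as  x = 41 * 6
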
37 changes: 34 additions & 3 deletions eval/eval.go
@@ -69,6 +69,33 @@ func ArgCheck[T any](msg string, n int, vararg bool, args []T) *object.Error {
return nil
}

func (s *State) evalPostfixExpression(node *ast.PostfixExpression) object.Object {
log.LogVf("eval postfix %s", node.DebugString())
id := node.Prev.Literal()
val, ok := s.env.Get(id)
if !ok {
return object.Error{Value: "<identifier not found: " + id + ">"}
}
var toAdd int64
switch node.Type() { //nolint:exhaustive // we have default.
case token.INCR:
toAdd = 1
case token.DECR:
toAdd = -1
default:
return object.Error{Value: "unknown postfix operator: " + node.Type().String()}
}
switch val := val.(type) {
case object.Integer:
s.env.Set(id, object.Integer{Value: val.Value + toAdd})
case object.Float:
s.env.Set(id, object.Float{Value: val.Value + float64(toAdd)})
default:
return object.Error{Value: "can't increment/decrement " + val.Type().String()}
}
return val
}

// Doesn't unwrap return - return bubbles up.
func (s *State) evalInternal(node any) object.Object {
switch node := node.(type) {
@@ -88,13 +115,17 @@ func (s *State) evalInternal(node any) object.Object {
log.LogVf("eval prefix %s", node.DebugString())
right := s.evalInternal(node.Right)
return s.evalPrefixExpression(node.Type(), right)
case *ast.PostfixExpression:
return s.evalPostfixExpression(node)
case *ast.InfixExpression:
log.LogVf("eval infix %s", node.DebugString())
right := s.Eval(node.Right) // need to unwrap "return"
if node.Literal() == "=" {
return s.evalAssignment(right, node)
// Eval and not evalInternal because we need to unwrap "return".
if node.Token.Type() == token.ASSIGN {
return s.evalAssignment(s.Eval(node.Right), node)
}
// Humans expect left to right evaluations.
left := s.Eval(node.Left)
right := s.Eval(node.Right)
return s.evalInfixExpression(node.Type(), left, right)

case *ast.IntegerLiteral:
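Two behavioral notes on the eval.go changes above. evalPostfixExpression returns val as it was read, before env.Set, so a postfix expression yields the pre-update value (post-increment semantics). And because infix operands are now evaluated left to right, side effects from ++/-- become observable in a predictable order; the new integer test cases below pin this down (annotations mine):

	// a=2; b=3:  a + 5*(b++) + b  ==  2 + 15 + 4  ==  21   (the trailing b sees the incremented value)
	// a=2; b=3:  b + 5*(b++) + a  ==  3 + 15 + 2  ==  20   (the leading b is read before the increment)
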
6 changes: 6 additions & 0 deletions eval/eval_test.go
@@ -39,6 +39,11 @@ func TestEvalIntegerExpression(t *testing.T) {
{"20 % -5", 0},
{"-21 % 5", -1},
{`fact = func(n) {if (n<2) {return 1} n*fact(n-1)}; fact(5)`, 120},
{`a=2; b=3; r=a+5*b++`, 17},
{`a=2; b=3; r=a+5*b++;b`, 4},
{`a=2; b=3; r=a+5*(b++)+b;`, 21}, // left to right eval, yet note that this is not well-defined behavior.
{`a=2; b=3; r=b+5*(b++)+a;`, 20}, // the leading solo b is read before the b++ increments it - not well-defined behavior.
{`a=2; b=3; r=a+5*b+++b;`, 21}, // parentheses are not technically needed here, though this is rather unreadable.
}

for i, tt := range tests {
@@ -704,6 +709,7 @@ func TestEvalFloatExpression(t *testing.T) {
{".3", 0.3},
{"0.5*3", 1.5},
{"0.5*6", 3},
{`a=3.1; a--; a`, 2.1},
}

for i, tt := range tests {
10 changes: 8 additions & 2 deletions lexer/lexer.go
@@ -35,8 +35,14 @@ func (l *Lexer) NextToken() *token.Token {
return token.ConstantTokenStr(literal)
}
return token.ConstantTokenChar(ch)

case '%', '*', '+', ';', ',', '{', '}', '(', ')', '[', ']', '-':
case '+', '-':
if l.peekChar() == ch {
nextChar := l.readChar()
literal := string(ch) + string(nextChar) // TODO: consider making a ConstantTokenChar2 instead of making a string
return token.ConstantTokenStr(literal) // increment/decrement
}
return token.ConstantTokenChar(ch)
case '%', '*', ';', ',', '{', '}', '(', ')', '[', ']':
// TODO maybe reorder so it's a continuous range for pure single character tokens
return token.ConstantTokenChar(ch)
case '/':
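A quick sketch of how the new two-character lookahead tokenizes adjacent operators (these exact inputs are not in the tests; the behavior follows from the peekChar check above):

	// b+++b  lexes as  b, ++, +, b   (the first two pluses are consumed greedily)
	// a--b   lexes as  a, --, b
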
6 changes: 6 additions & 0 deletions lexer/lexer_test.go
@@ -37,6 +37,8 @@ return // nil return
macro(x, y) { x + y }
a3:=5
4>=3.1
i++
j--
@
`

@@ -151,6 +153,10 @@ a3:=5
{token.INT, "4"},
{token.GTEQ, ">="},
{token.FLOAT, "3.1"},
{token.IDENT, "i"},
{token.INCR, "++"},
{token.IDENT, "j"},
{token.DECR, "--"},
{token.ILLEGAL, "@"},
{token.EOF, ""},
}
34 changes: 29 additions & 5 deletions parser/parser.go
@@ -29,21 +29,24 @@ const (
var _ = CALL.String() // force compile error if go generate is missing.

type (
prefixParseFn func() ast.Node
infixParseFn func(ast.Node) ast.Node
prefixParseFn func() ast.Node
infixParseFn func(ast.Node) ast.Node
postfixParseFn prefixParseFn
)

type Parser struct {
l *lexer.Lexer

prevToken *token.Token
curToken *token.Token
peekToken *token.Token

errors []string
continuationNeeded bool

prefixParseFns map[token.Type]prefixParseFn
infixParseFns map[token.Type]infixParseFn
prefixParseFns map[token.Type]prefixParseFn
infixParseFns map[token.Type]infixParseFn
postfixParseFns map[token.Type]postfixParseFn
}

func (p *Parser) ContinuationNeeded() bool {
@@ -58,6 +61,10 @@ func (p *Parser) registerInfix(t token.Type, fn infixParseFn) {
p.infixParseFns[t] = fn
}

func (p *Parser) registerPostfix(t token.Type, fn postfixParseFn) {
p.postfixParseFns[t] = fn
}

func New(l *lexer.Lexer) *Parser {
p := &Parser{
l: l,
@@ -103,10 +110,13 @@ func New(l *lexer.Lexer) *Parser {
p.registerInfix(token.GTEQ, p.parseInfixExpression)
p.registerInfix(token.LPAREN, p.parseCallExpression)
p.registerInfix(token.LBRACKET, p.parseIndexExpression)

// no let:
p.registerInfix(token.ASSIGN, p.parseInfixExpression)

p.postfixParseFns = make(map[token.Type]postfixParseFn)
p.registerPostfix(token.INCR, p.parsePostfixExpression)
p.registerPostfix(token.DECR, p.parsePostfixExpression)

// Read two tokens, so curToken and peekToken are both set
p.nextToken()
p.nextToken()
@@ -119,6 +129,7 @@ func (p *Parser) Errors() []string {
}

func (p *Parser) nextToken() {
p.prevToken = p.curToken
p.curToken = p.peekToken
p.peekToken = p.l.NextToken()
}
@@ -261,6 +272,12 @@ func (p *Parser) parseExpression(precedence Priority) ast.Node {
}

func (p *Parser) parseIdentifier() ast.Node {
postfix := p.postfixParseFns[p.peekToken.Type()]
if postfix != nil {
log.LogVf("parseIdentifier: next is a postfix for %s: %s", p.curToken.DebugString(), p.peekToken.DebugString())
p.nextToken()
return postfix()
}
i := &ast.Identifier{}
i.Token = p.curToken
return i
@@ -324,6 +341,13 @@ func (p *Parser) parsePrefixExpression() ast.Node {
return expression
}

func (p *Parser) parsePostfixExpression() ast.Node {
expression := &ast.PostfixExpression{}
expression.Token = p.curToken
expression.Prev = p.prevToken
return expression
}

var precedences = map[token.Type]Priority{
token.ASSIGN: ASSIGN,
token.EQ: EQUALS,
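To make the prevToken handoff above concrete, here is roughly how a++ moves through the parser (a walk-through of the code above, not text from the repo):

	// curToken = IDENT "a", peekToken = INCR "++"
	// parseIdentifier finds a registered postfix fn for ++ and calls nextToken()
	// now prevToken = IDENT "a", curToken = INCR "++"
	// parsePostfixExpression returns PostfixExpression{Token: "++", Prev: "a"}
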
20 changes: 12 additions & 8 deletions parser/parser_test.go
@@ -18,7 +18,7 @@ foobar = 838383;
p := parser.New(l)

program := p.ParseProgram()
checkParserErrors(t, p)
checkParserErrors(t, input, p)
if program == nil {
t.Fatalf("ParseProgram() returned nil")
}
@@ -43,13 +43,13 @@ foobar = 838383;
}
}

func checkParserErrors(t *testing.T, p *parser.Parser) {
func checkParserErrors(t *testing.T, input string, p *parser.Parser) {
errors := p.Errors()
if len(errors) == 0 {
return
}

t.Errorf("parser has %d error(s)", len(errors))
t.Errorf("parser has %d error(s) for %q", len(errors), input)
for _, msg := range errors {
t.Errorf("parser error: %s", msg)
}
@@ -66,7 +66,7 @@ return 993322;
p := parser.New(l)

program := p.ParseProgram()
checkParserErrors(t, p)
checkParserErrors(t, input, p)

if len(program.Statements) != 3 {
t.Fatalf("program.Statements does not contain 3 statements. got=%d",
@@ -92,7 +92,7 @@ func Test_IdentifierExpression(t *testing.T) {
l := lexer.New(input)
p := parser.New(l)
program := p.ParseProgram()
checkParserErrors(t, p)
checkParserErrors(t, input, p)

if len(program.Statements) != 1 {
t.Fatalf("program has not enough statements. got=%d",
@@ -134,8 +134,12 @@ func Test_OperatorPrecedenceParsing(t *testing.T) {
"!(-a)", // or maybe !-a - it's more compact but... less readable?
},
{
"--a",
"-(-a)",
"-(-a)",
},
{
"a--",
"a--",
},
{
"a + b + c",
@@ -179,7 +183,7 @@ func Test_OperatorPrecedenceParsing(t *testing.T) {
},
{
"x = 41 * 6",
"x = (41 * 6)",
"x = 41 * 6", // = doesn't trigger expression level so it's more natural to read.
},
{
"foo = func(a,b) {return a+b}",
@@ -191,7 +195,7 @@ func Test_OperatorPrecedenceParsing(t *testing.T) {
l := lexer.New(tt.input)
p := parser.New(l)
program := p.ParseProgram()
checkParserErrors(t, p)
checkParserErrors(t, tt.input, p)

actual := ast.DebugString(program)
last := actual[len(actual)-1]
4 changes: 4 additions & 0 deletions token/token.go
@@ -97,6 +97,8 @@ const (
GTEQ
EQ
NOTEQ
INCR
DECR

endMultiCharTokens

@@ -226,6 +228,8 @@ func Init() {
assocS(GTEQ, ">=")
assocS(EQ, "==")
assocS(NOTEQ, "!=")
assocS(INCR, "++")
assocS(DECR, "--")
// Special alias for := to be same as ASSIGN.
sTokens[":="] = cTokens['=']
}
2 changes: 2 additions & 0 deletions token/token_test.go
@@ -75,6 +75,8 @@ func TestMultiCharTokens(t *testing.T) {
{"!=", NOTEQ},
{">=", GTEQ},
{"<=", LTEQ},
{"++", INCR},
{"--", DECR},
}
for _, tt := range tests {
tok := &Token{tokenType: tt.expected, literal: tt.input}