Add all the files from the interpreter repo
Signed-off-by: jmug <u.g.a.mariano@gmail.com>
parent 230fe61b12
commit 0acd1d41e8
34 changed files with 3784 additions and 0 deletions
cmd/repl/main.go (new file, 19 lines added)
@@ -0,0 +1,19 @@
package main

import (
    "fmt"
    "os"
    "os/user"

    "code.jmug.me/jmug/interpreter-in-go/pkg/repl"
)

func main() {
    user, err := user.Current()
    if err != nil {
        panic(err)
    }
    fmt.Printf("Hello %s, this is the Monkey programming language!\n", user.Username)
    fmt.Println("Go ahead, type something :)")
    repl.Start(os.Stdin, os.Stdout)
}
go.mod (new file, 3 lines added)
@@ -0,0 +1,3 @@
module code.jmug.me/jmug/interpreter-in-go

go 1.23.3
pkg/ast/array.go (new file, 29 lines added)
@@ -0,0 +1,29 @@
package ast

import (
    "bytes"
    "strings"

    "code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

type ArrayLiteral struct {
    Token    token.Token // The '[' token
    Elements []Expression
}

func (al *ArrayLiteral) expressionNode() {}
func (al *ArrayLiteral) TokenLiteral() string {
    return al.Token.Literal
}
func (al *ArrayLiteral) String() string {
    var out bytes.Buffer
    elements := []string{}
    for _, el := range al.Elements {
        elements = append(elements, el.String())
    }
    out.WriteString("[")
    out.WriteString(strings.Join(elements, ", "))
    out.WriteString("]")
    return out.String()
}
pkg/ast/ast.go (new file, 39 lines added)
@@ -0,0 +1,39 @@
package ast

import (
    "bytes"
)

type Node interface {
    TokenLiteral() string
    String() string
}

type Statement interface {
    Node
    statementNode()
}

type Expression interface {
    Node
    expressionNode()
}

type Program struct {
    Statements []Statement
}

func (p *Program) TokenLiteral() string {
    if len(p.Statements) > 0 {
        return p.Statements[0].TokenLiteral()
    }
    return ""
}

func (p *Program) String() string {
    var out bytes.Buffer
    for _, stmt := range p.Statements {
        out.WriteString(stmt.String())
    }
    return out.String()
}
pkg/ast/ast_test.go (new file, 29 lines added)
@@ -0,0 +1,29 @@
package ast

import (
    "testing"

    "code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

func TestString(t *testing.T) {
    program := &Program{
        Statements: []Statement{
            &LetStatement{
                Token: token.Token{Type: token.LET, Literal: "let"},
                Name: &Identifier{
                    Token: token.Token{Type: token.IDENT, Literal: "myVar"},
                    Value: "myVar",
                },
                Value: &Identifier{
                    Token: token.Token{Type: token.IDENT, Literal: "anotherVar"},
                    Value: "anotherVar",
                },
            },
        },
    }

    if program.String() != "let myVar = anotherVar;" {
        t.Errorf("program.String() wrong. got=%q", program.String())
    }
}
pkg/ast/block.go (new file, 24 lines added)
@@ -0,0 +1,24 @@
package ast

import (
    "bytes"

    "code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

type BlockStatement struct {
    Token      token.Token // The `{` token.
    Statements []Statement
}

func (bs *BlockStatement) statementNode() {}
func (bs *BlockStatement) TokenLiteral() string {
    return bs.Token.Literal
}
func (bs *BlockStatement) String() string {
    var out bytes.Buffer
    for _, s := range bs.Statements {
        out.WriteString(s.String())
    }
    return out.String()
}
pkg/ast/boolean.go (new file, 16 lines added)
@@ -0,0 +1,16 @@
package ast

import "code.jmug.me/jmug/interpreter-in-go/pkg/token"

type Boolean struct {
    Token token.Token
    Value bool
}

func (bl *Boolean) expressionNode() {}
func (bl *Boolean) TokenLiteral() string {
    return bl.Token.Literal
}
func (bl *Boolean) String() string {
    return bl.Token.Literal
}
pkg/ast/call.go (new file, 31 lines added)
@@ -0,0 +1,31 @@
package ast

import (
    "bytes"
    "strings"

    "code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

type CallExpression struct {
    Token     token.Token // The ( token
    Function  Expression
    Arguments []Expression
}

func (ce *CallExpression) expressionNode() {}
func (ce *CallExpression) TokenLiteral() string {
    return ce.Token.Literal
}
func (ce *CallExpression) String() string {
    var out bytes.Buffer
    out.WriteString(ce.Function.String())
    out.WriteString("(")
    args := []string{}
    for _, arg := range ce.Arguments {
        args = append(args, arg.String())
    }
    out.WriteString(strings.Join(args, ", "))
    out.WriteString(")")
    return out.String()
}
pkg/ast/expression_statement.go (new file, 23 lines added)
@@ -0,0 +1,23 @@
package ast

import "code.jmug.me/jmug/interpreter-in-go/pkg/token"

// ExpressionStatement is a simple wrapper of an expression in a statement.
// This is common in scripting languages and allows you to have a source line
// that is solely an expression; think of the Python REPL and how you can
// type `1 + 1` and get a result.
type ExpressionStatement struct {
    Token      token.Token // The first token in the expression.
    Expression Expression
}

func (es *ExpressionStatement) statementNode() {}
func (es *ExpressionStatement) TokenLiteral() string {
    return es.Token.Literal
}
func (es *ExpressionStatement) String() string {
    if es.Expression != nil {
        return es.Expression.String()
    }
    return ""
}
pkg/ast/function.go (new file, 32 lines added)
@@ -0,0 +1,32 @@
package ast

import (
    "bytes"
    "strings"

    "code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

type FunctionLiteral struct {
    Token      token.Token // The fn token
    Parameters []*Identifier
    Body       *BlockStatement
}

func (fl *FunctionLiteral) expressionNode() {}
func (fl *FunctionLiteral) TokenLiteral() string {
    return fl.Token.Literal
}
func (fl *FunctionLiteral) String() string {
    var out bytes.Buffer
    params := []string{}
    for _, p := range fl.Parameters {
        params = append(params, p.String())
    }
    out.WriteString(fl.TokenLiteral())
    out.WriteString("(")
    out.WriteString(strings.Join(params, ", "))
    out.WriteString(") ")
    out.WriteString(fl.Body.String())
    return out.String()
}
pkg/ast/hash.go (new file, 24 lines added)
@@ -0,0 +1,24 @@
package ast

import (
    "strings"

    "code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

type HashLiteral struct {
    Token token.Token // The "{" token
    Pairs map[Expression]Expression
}

func (hl *HashLiteral) expressionNode() {}
func (hl *HashLiteral) TokenLiteral() string {
    return hl.Token.Literal
}
func (hl *HashLiteral) String() string {
    pairs := []string{}
    for k, v := range hl.Pairs {
        pairs = append(pairs, k.String()+":"+v.String())
    }
    return "{" + strings.Join(pairs, ", ") + "}"
}
pkg/ast/identifier.go (new file, 20 lines added)
@@ -0,0 +1,20 @@
package ast

import "code.jmug.me/jmug/interpreter-in-go/pkg/token"

// Identifier is treated as an expression because in certain
// circumstances it can return a value (think `let some = other`, where `other`
// is actually an expression returning a value), and this makes identifiers
// easier to handle (according to the author).
type Identifier struct {
    Token token.Token
    Value string
}

func (i *Identifier) expressionNode() {}
func (i *Identifier) TokenLiteral() string {
    return i.Token.Literal
}
func (i *Identifier) String() string {
    return i.Value
}
pkg/ast/if_expression.go (new file, 31 lines added)
@@ -0,0 +1,31 @@
package ast

import (
    "bytes"

    "code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

type IfExpression struct {
    Token       token.Token // The `if` token.
    Condition   Expression
    Consequence *BlockStatement
    Alternative *BlockStatement
}

func (ie *IfExpression) expressionNode() {}
func (ie *IfExpression) TokenLiteral() string {
    return ie.Token.Literal
}
func (ie *IfExpression) String() string {
    var out bytes.Buffer
    out.WriteString("if")
    out.WriteString(ie.Condition.String())
    out.WriteString(" ")
    out.WriteString(ie.Consequence.String())
    if ie.Alternative != nil {
        out.WriteString("else ")
        out.WriteString(ie.Alternative.String())
    }
    return out.String()
}
pkg/ast/index.go (new file, 21 lines added)
@@ -0,0 +1,21 @@
package ast

import (
    "fmt"

    "code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

type IndexExpression struct {
    Token token.Token // The "[" token
    Left  Expression
    Index Expression
}

func (ie *IndexExpression) expressionNode() {}
func (ie *IndexExpression) TokenLiteral() string {
    return ie.Token.Literal
}
func (ie *IndexExpression) String() string {
    return fmt.Sprintf("(%s[%s])", ie.Left.String(), ie.Index.String())
}
pkg/ast/infix_expression.go (new file, 18 lines added)
@@ -0,0 +1,18 @@
package ast

import "code.jmug.me/jmug/interpreter-in-go/pkg/token"

type InfixExpression struct {
    Token    token.Token // The operator token
    Operator string
    Left     Expression
    Right    Expression
}

func (ie *InfixExpression) expressionNode() {}
func (ie *InfixExpression) TokenLiteral() string {
    return ie.Token.Literal
}
func (ie *InfixExpression) String() string {
    return "(" + ie.Left.String() + " " + ie.Operator + " " + ie.Right.String() + ")"
}
pkg/ast/integer.go (new file, 16 lines added)
@@ -0,0 +1,16 @@
package ast

import "code.jmug.me/jmug/interpreter-in-go/pkg/token"

type IntegerLiteral struct {
    Token token.Token
    Value int64
}

func (il *IntegerLiteral) expressionNode() {}
func (il *IntegerLiteral) TokenLiteral() string {
    return il.Token.Literal
}
func (il *IntegerLiteral) String() string {
    return il.Token.Literal
}
pkg/ast/let.go (new file, 28 lines added)
@@ -0,0 +1,28 @@
package ast

import (
    "bytes"

    "code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

type LetStatement struct {
    Token token.Token // TODO: This is a little redundant, figure out if I can get rid of it.
    Name  *Identifier
    Value Expression
}

func (ls *LetStatement) statementNode() {}
func (ls *LetStatement) TokenLiteral() string {
    return ls.Token.Literal
}
func (ls *LetStatement) String() string {
    var out bytes.Buffer
    out.WriteString(ls.TokenLiteral() + " ")
    out.WriteString(ls.Name.String() + " = ")
    if ls.Value != nil {
        out.WriteString(ls.Value.String())
    }
    out.WriteString(";")
    return out.String()
}
pkg/ast/prefix_expression.go (new file, 17 lines added)
@@ -0,0 +1,17 @@
package ast

import "code.jmug.me/jmug/interpreter-in-go/pkg/token"

type PrefixExpression struct {
    Token    token.Token // The operator token
    Operator string
    Right    Expression
}

func (pe *PrefixExpression) expressionNode() {}
func (pe *PrefixExpression) TokenLiteral() string {
    return pe.Token.Literal
}
func (pe *PrefixExpression) String() string {
    return "(" + pe.Operator + pe.Right.String() + ")"
}
pkg/ast/return.go (new file, 26 lines added)
@@ -0,0 +1,26 @@
package ast

import (
    "bytes"

    "code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

type ReturnStatement struct {
    Token       token.Token // TODO: This is a little redundant, figure out if I can get rid of it.
    ReturnValue Expression
}

func (rs *ReturnStatement) statementNode() {}
func (rs *ReturnStatement) TokenLiteral() string {
    return rs.Token.Literal
}
func (rs *ReturnStatement) String() string {
    var out bytes.Buffer
    out.WriteString(rs.TokenLiteral())
    if rs.ReturnValue != nil {
        out.WriteString(" " + rs.ReturnValue.String())
    }
    out.WriteString(";")
    return out.String()
}
pkg/ast/string.go (new file, 16 lines added)
@@ -0,0 +1,16 @@
package ast

import "code.jmug.me/jmug/interpreter-in-go/pkg/token"

type StringLiteral struct {
    Token token.Token
    Value string
}

func (s *StringLiteral) expressionNode() {}
func (s *StringLiteral) TokenLiteral() string {
    return s.Token.Literal
}
func (s *StringLiteral) String() string {
    return s.Token.Literal
}
pkg/evaluator/builtins.go (new file, 121 lines added)
@@ -0,0 +1,121 @@
package evaluator
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"code.jmug.me/jmug/interpreter-in-go/pkg/object"
|
||||
)
|
||||
|
||||
var builtins = map[string]*object.Builtin{
|
||||
"puts": {
|
||||
Fn: func(args ...object.Object) object.Object {
|
||||
for _, arg := range args {
|
||||
fmt.Println(arg.Inspect())
|
||||
}
|
||||
return _NULL
|
||||
},
|
||||
},
|
||||
"len": {
|
||||
Fn: func(args ...object.Object) object.Object {
|
||||
if len(args) != 1 {
|
||||
return newError("wrong number of arguments. got=%d, want=1",
|
||||
len(args))
|
||||
}
|
||||
|
||||
switch arg := args[0].(type) {
|
||||
case *object.String:
|
||||
return &object.Integer{Value: int64(len(arg.Value))}
|
||||
case *object.Array:
|
||||
return &object.Integer{Value: int64(len(arg.Elements))}
|
||||
default:
|
||||
return newError("argument to `len` not supported, got %s",
|
||||
args[0].Type())
|
||||
}
|
||||
},
|
||||
},
|
||||
"first": {
|
||||
Fn: func(args ...object.Object) object.Object {
|
||||
if len(args) != 1 {
|
||||
return newError("wrong number of arguments. got=%d, want=1",
|
||||
len(args))
|
||||
}
|
||||
if args[0].Type() != object.ARRAY_OBJ {
|
||||
return newError("argument to `first` must be ARRAY, got %s",
|
||||
args[0].Type())
|
||||
}
|
||||
|
||||
arr := args[0].(*object.Array)
|
||||
if len(arr.Elements) > 0 {
|
||||
return arr.Elements[0]
|
||||
}
|
||||
|
||||
return _NULL
|
||||
},
|
||||
},
|
||||
"last": {
|
||||
Fn: func(args ...object.Object) object.Object {
|
||||
if len(args) != 1 {
|
||||
return newError("wrong number of arguments. got=%d, want=1",
|
||||
len(args))
|
||||
}
|
||||
if args[0].Type() != object.ARRAY_OBJ {
|
||||
return newError("argument to `last` must be ARRAY, got %s",
|
||||
args[0].Type())
|
||||
}
|
||||
|
||||
arr := args[0].(*object.Array)
|
||||
if len(arr.Elements) > 0 {
|
||||
return arr.Elements[len(arr.Elements)-1]
|
||||
}
|
||||
return _NULL
|
||||
},
|
||||
},
|
||||
"rest": {
|
||||
Fn: func(args ...object.Object) object.Object {
|
||||
if len(args) != 1 {
|
||||
return newError("wrong number of arguments. got=%d, want=1",
|
||||
len(args))
|
||||
}
|
||||
if args[0].Type() != object.ARRAY_OBJ {
|
||||
return newError("argument to `rest` must be ARRAY, got %s",
|
||||
args[0].Type())
|
||||
}
|
||||
arr := args[0].(*object.Array).Elements
|
||||
arrLen := len(arr)
|
||||
if arrLen > 0 {
|
||||
newArr := make([]object.Object, arrLen-1)
|
||||
copy(newArr, arr[1:])
|
||||
return &object.Array{Elements: newArr}
|
||||
}
|
||||
return _NULL
|
||||
},
|
||||
},
|
||||
"push": {
|
||||
Fn: func(args ...object.Object) object.Object {
|
||||
if len(args) != 2 {
|
||||
return newError("wrong number of arguments. got=%d, want=2",
|
||||
len(args))
|
||||
}
|
||||
if args[0].Type() != object.ARRAY_OBJ {
|
||||
return newError("argument to `push` must be ARRAY, got %s",
|
||||
args[0].Type())
|
||||
}
|
||||
arr := args[0].(*object.Array).Elements
|
||||
arrLen := len(arr)
|
||||
newArr := make([]object.Object, arrLen+1)
|
||||
copy(newArr, arr)
|
||||
newArr[arrLen] = args[1]
|
||||
return &object.Array{Elements: newArr}
|
||||
},
|
||||
},
|
||||
"exit": {
|
||||
Fn: func(args ...object.Object) object.Object {
|
||||
if len(args) != 0 {
|
||||
return newError("exit takes no arguments...")
|
||||
}
|
||||
os.Exit(0)
|
||||
return nil // Make the compiler happy.
|
||||
},
|
||||
},
|
||||
}
|
||||
pkg/evaluator/evaluator.go (new file, 358 lines added)
@@ -0,0 +1,358 @@
package evaluator
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"code.jmug.me/jmug/interpreter-in-go/pkg/ast"
|
||||
"code.jmug.me/jmug/interpreter-in-go/pkg/object"
|
||||
)
|
||||
|
||||
var (
|
||||
_NULL = &object.Null{}
|
||||
_TRUE = &object.Boolean{Value: true}
|
||||
_FALSE = &object.Boolean{Value: false}
|
||||
)
|
||||
|
||||
func Eval(node ast.Node, env *object.Environment) object.Object {
|
||||
switch node := node.(type) {
|
||||
// Statements.
|
||||
case *ast.Program:
|
||||
return evalProgram(node.Statements, env)
|
||||
case *ast.ExpressionStatement:
|
||||
return Eval(node.Expression, env)
|
||||
// Expressions.
|
||||
case *ast.IntegerLiteral:
|
||||
return &object.Integer{Value: node.Value}
|
||||
case *ast.Boolean:
|
||||
return nativeBoolToBooleanObject(node.Value)
|
||||
case *ast.StringLiteral:
|
||||
return &object.String{Value: node.Value}
|
||||
case *ast.PrefixExpression:
|
||||
right := Eval(node.Right, env)
|
||||
if isError(right) {
|
||||
return right
|
||||
}
|
||||
return evalPrefixExpression(node.Operator, right)
|
||||
case *ast.InfixExpression:
|
||||
left := Eval(node.Left, env)
|
||||
if isError(left) {
|
||||
return left
|
||||
}
|
||||
right := Eval(node.Right, env)
|
||||
if isError(right) {
|
||||
return right
|
||||
}
|
||||
return evalInfixExpression(node.Operator, left, right)
|
||||
case *ast.BlockStatement:
|
||||
return evalBlockStatement(node.Statements, env)
|
||||
case *ast.IfExpression:
|
||||
return evalIfExpression(node, env)
|
||||
case *ast.ReturnStatement:
|
||||
ret := Eval(node.ReturnValue, env)
|
||||
if isError(ret) {
|
||||
return ret
|
||||
}
|
||||
return &object.ReturnValue{Value: ret}
|
||||
case *ast.LetStatement:
|
||||
val := Eval(node.Value, env)
|
||||
if isError(val) {
|
||||
return val
|
||||
}
|
||||
env.Set(node.Name.Value, val)
|
||||
case *ast.Identifier:
|
||||
return evalIdentifier(node, env)
|
||||
case *ast.FunctionLiteral:
|
||||
params := node.Parameters
|
||||
body := node.Body
|
||||
return &object.Function{Parameters: params, Body: body, Env: env}
|
||||
case *ast.CallExpression:
|
||||
fn := Eval(node.Function, env)
|
||||
if isError(fn) {
|
||||
return fn
|
||||
}
|
||||
args := evalExpressions(node.Arguments, env)
|
||||
if len(args) == 1 && isError(args[0]) {
|
||||
return args[0]
|
||||
}
|
||||
return applyFunction(fn, args)
|
||||
case *ast.ArrayLiteral:
|
||||
els := evalExpressions(node.Elements, env)
|
||||
if len(els) == 1 && isError(els[0]) {
|
||||
return els[0]
|
||||
}
|
||||
return &object.Array{Elements: els}
|
||||
case *ast.IndexExpression:
|
||||
left := Eval(node.Left, env)
|
||||
if isError(left) {
|
||||
return left
|
||||
}
|
||||
index := Eval(node.Index, env)
|
||||
if isError(index) {
|
||||
return index
|
||||
}
|
||||
return evalIndexExpression(left, index)
|
||||
case *ast.HashLiteral:
|
||||
return evalHashLiteral(node, env)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func evalProgram(stmts []ast.Statement, env *object.Environment) object.Object {
|
||||
var res object.Object
|
||||
for _, stmt := range stmts {
|
||||
res = Eval(stmt, env)
|
||||
switch res := res.(type) {
|
||||
case *object.ReturnValue:
|
||||
return res.Value
|
||||
case *object.Error:
|
||||
return res
|
||||
}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func evalBlockStatement(stmts []ast.Statement, env *object.Environment) object.Object {
|
||||
var res object.Object
|
||||
for _, stmt := range stmts {
|
||||
res = Eval(stmt, env)
|
||||
if res != nil && (res.Type() == object.RETURN_VALUE_OBJ || res.Type() == object.ERROR_OBJ) {
|
||||
return res
|
||||
}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func evalPrefixExpression(op string, right object.Object) object.Object {
|
||||
switch op {
|
||||
case "!":
|
||||
return evalBangOperatorExpression(right)
|
||||
case "-":
|
||||
return evalMinusPrefixOperatorExpression(right)
|
||||
default:
|
||||
return newError("unknown operator: %s%s", op, right.Type())
|
||||
}
|
||||
}
|
||||
|
||||
func evalBangOperatorExpression(obj object.Object) object.Object {
|
||||
switch obj {
|
||||
case _TRUE:
|
||||
return _FALSE
|
||||
case _FALSE:
|
||||
return _TRUE
|
||||
case _NULL:
|
||||
return _TRUE
|
||||
default:
|
||||
return _FALSE
|
||||
}
|
||||
}
|
||||
|
||||
func evalMinusPrefixOperatorExpression(obj object.Object) object.Object {
|
||||
if obj.Type() != object.INTEGER_OBJ {
|
||||
return newError("unknown operator: -%s", obj.Type())
|
||||
}
|
||||
val := obj.(*object.Integer).Value
|
||||
return &object.Integer{Value: -val}
|
||||
}
|
||||
|
||||
func evalInfixExpression(op string, left, right object.Object) object.Object {
|
||||
switch {
|
||||
case left.Type() == object.INTEGER_OBJ && right.Type() == object.INTEGER_OBJ:
|
||||
return evalIntegerInfixExpression(op, left, right)
|
||||
case left.Type() == object.STRING_OBJ && right.Type() == object.STRING_OBJ:
|
||||
return evalStringInfixExpression(op, left, right)
|
||||
case op == "==":
|
||||
return nativeBoolToBooleanObject(left == right)
|
||||
case op == "!=":
|
||||
return nativeBoolToBooleanObject(left != right)
|
||||
case left.Type() != right.Type():
|
||||
return newError("type mismatch: %s %s %s",
|
||||
left.Type(), op, right.Type())
|
||||
default:
|
||||
return newError("unknown operator: %s %s %s",
|
||||
left.Type(), op, right.Type())
|
||||
}
|
||||
}
|
||||
|
||||
func evalIntegerInfixExpression(op string, left, right object.Object) object.Object {
|
||||
l := left.(*object.Integer).Value
|
||||
r := right.(*object.Integer).Value
|
||||
switch op {
|
||||
case "+":
|
||||
return &object.Integer{Value: l + r}
|
||||
case "-":
|
||||
return &object.Integer{Value: l - r}
|
||||
case "*":
|
||||
return &object.Integer{Value: l * r}
|
||||
case "/":
|
||||
return &object.Integer{Value: l / r}
|
||||
case "<":
|
||||
return nativeBoolToBooleanObject(l < r)
|
||||
case ">":
|
||||
return nativeBoolToBooleanObject(l > r)
|
||||
case "==":
|
||||
return nativeBoolToBooleanObject(l == r)
|
||||
case "!=":
|
||||
return nativeBoolToBooleanObject(l != r)
|
||||
default:
|
||||
return newError("unknown operator: %s %s %s",
|
||||
left.Type(), op, right.Type())
|
||||
}
|
||||
}
|
||||
|
||||
func evalStringInfixExpression(op string, left, right object.Object) object.Object {
|
||||
if op != "+" {
|
||||
return newError(
|
||||
"unknown operator: %s %s %s",
|
||||
left.Type(), op, right.Type(),
|
||||
)
|
||||
}
|
||||
l := left.(*object.String).Value
|
||||
r := right.(*object.String).Value
|
||||
return &object.String{Value: l + r}
|
||||
}
|
||||
|
||||
func evalIfExpression(ifExp *ast.IfExpression, env *object.Environment) object.Object {
|
||||
cond := Eval(ifExp.Condition, env)
|
||||
if isError(cond) {
|
||||
return cond
|
||||
}
|
||||
if isTruthy(cond) {
|
||||
return Eval(ifExp.Consequence, env)
|
||||
} else if ifExp.Alternative != nil {
|
||||
return Eval(ifExp.Alternative, env)
|
||||
}
|
||||
return _NULL
|
||||
}
|
||||
|
||||
func evalIdentifier(exp *ast.Identifier, env *object.Environment) object.Object {
|
||||
if val, ok := env.Get(exp.Value); ok {
|
||||
return val
|
||||
}
|
||||
if val, ok := builtins[exp.Value]; ok {
|
||||
return val
|
||||
}
|
||||
return newError("identifier not found: " + exp.Value)
|
||||
}
|
||||
|
||||
func evalExpressions(
|
||||
exps []ast.Expression,
|
||||
env *object.Environment,
|
||||
) []object.Object {
|
||||
var res []object.Object
|
||||
for _, exp := range exps {
|
||||
ev := Eval(exp, env)
|
||||
if isError(ev) {
|
||||
return []object.Object{ev}
|
||||
}
|
||||
res = append(res, ev)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func evalIndexExpression(left, index object.Object) object.Object {
|
||||
switch {
|
||||
case left.Type() == object.ARRAY_OBJ && index.Type() == object.INTEGER_OBJ:
|
||||
return evalArrayIndexExpression(left, index)
|
||||
case left.Type() == object.HASH_OBJ:
|
||||
return evalHashIndexExpression(left, index)
|
||||
default:
|
||||
return newError("index operator not supported: %s", left.Type())
|
||||
}
|
||||
}
|
||||
|
||||
func evalArrayIndexExpression(arrayObj, indexObj object.Object) object.Object {
|
||||
array := arrayObj.(*object.Array).Elements
|
||||
index := indexObj.(*object.Integer).Value
|
||||
if index < 0 || index >= int64(len(array)) {
|
||||
return _NULL
|
||||
}
|
||||
return array[index]
|
||||
}
|
||||
|
||||
func evalHashIndexExpression(hashObj, index object.Object) object.Object {
|
||||
hash := hashObj.(*object.Hash)
|
||||
hashable, okHash := index.(object.Hashable)
|
||||
if !okHash {
|
||||
return newError("unusable as hash key: %s", index.Type())
|
||||
}
|
||||
pair, okPair := hash.Pairs[hashable.HashKey()]
|
||||
if !okPair {
|
||||
return _NULL
|
||||
}
|
||||
return pair.Value
|
||||
}
|
||||
|
||||
func evalHashLiteral(hash *ast.HashLiteral, env *object.Environment) object.Object {
|
||||
pairs := map[object.HashKey]object.HashPair{}
|
||||
for ke, ve := range hash.Pairs {
|
||||
k := Eval(ke, env)
|
||||
if isError(k) {
|
||||
return k
|
||||
}
|
||||
hashable, ok := k.(object.Hashable)
|
||||
if !ok {
|
||||
return newError("unusable as hash key: %s", k.Type())
|
||||
}
|
||||
v := Eval(ve, env)
|
||||
if isError(v) {
|
||||
return v
|
||||
}
|
||||
hashKey := hashable.HashKey()
|
||||
pairs[hashKey] = object.HashPair{Key: k, Value: v}
|
||||
}
|
||||
return &object.Hash{Pairs: pairs}
|
||||
}
|
||||
|
||||
func applyFunction(fnObj object.Object, args []object.Object) object.Object {
|
||||
switch fn := fnObj.(type) {
|
||||
case *object.Function:
|
||||
env := extendFunctionEnv(fn, args)
|
||||
ret := Eval(fn.Body, env)
|
||||
return unwrapReturnValue(ret)
|
||||
case *object.Builtin:
|
||||
return fn.Fn(args...)
|
||||
}
|
||||
return newError("not a function: %s", fnObj.Type())
|
||||
}
|
||||
|
||||
func extendFunctionEnv(fn *object.Function, args []object.Object) *object.Environment {
|
||||
env := object.NewEnclosedEnvironment(fn.Env)
|
||||
for pi, param := range fn.Parameters {
|
||||
env.Set(param.Value, args[pi])
|
||||
}
|
||||
return env
|
||||
}
|
||||
|
||||
func unwrapReturnValue(obj object.Object) object.Object {
|
||||
if ret, ok := obj.(*object.ReturnValue); ok {
|
||||
return ret.Value
|
||||
}
|
||||
return obj
|
||||
}
|
||||
|
||||
func isTruthy(obj object.Object) bool {
|
||||
switch obj {
|
||||
case _TRUE:
|
||||
return true
|
||||
case _FALSE:
|
||||
return false
|
||||
case _NULL:
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func nativeBoolToBooleanObject(b bool) object.Object {
|
||||
if b {
|
||||
return _TRUE
|
||||
}
|
||||
return _FALSE
|
||||
}
|
||||
|
||||
func newError(format string, a ...any) *object.Error {
|
||||
return &object.Error{Message: fmt.Sprintf(format, a...)}
|
||||
}
|
||||
|
||||
func isError(obj object.Object) bool {
|
||||
return obj != nil && obj.Type() == object.ERROR_OBJ
|
||||
}
|
||||
pkg/evaluator/evaluator_test.go (new file, 630 lines added)
@@ -0,0 +1,630 @@
package evaluator
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"code.jmug.me/jmug/interpreter-in-go/pkg/lexer"
|
||||
"code.jmug.me/jmug/interpreter-in-go/pkg/object"
|
||||
"code.jmug.me/jmug/interpreter-in-go/pkg/parser"
|
||||
)
|
||||
|
||||
func TestEvalIntegerExpression(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected int64
|
||||
}{
|
||||
{"5", 5},
|
||||
{"10", 10},
|
||||
{"-5", -5},
|
||||
{"-10", -10},
|
||||
{"5 + 5 + 5 + 5 - 10", 10},
|
||||
{"2 * 2 * 2 * 2 * 2", 32},
|
||||
{"-50 + 100 + -50", 0},
|
||||
{"5 * 2 + 10", 20},
|
||||
{"5 + 2 * 10", 25},
|
||||
{"20 + 2 * -10", 0},
|
||||
{"50 / 2 * 2 + 10", 60},
|
||||
{"2 * (5 + 10)", 30},
|
||||
{"3 * 3 * 3 + 10", 37},
|
||||
{"3 * (3 * 3) + 10", 37},
|
||||
{"(5 + 10 * 2 + 15 / 3) * 2 + -10", 50},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
evaluated := testEval(tt.input)
|
||||
testIntegerObject(t, evaluated, tt.expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvalBooleanExpression(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected bool
|
||||
}{
|
||||
{"true", true},
|
||||
{"false", false},
|
||||
{"1 < 2", true},
|
||||
{"1 > 2", false},
|
||||
{"1 < 1", false},
|
||||
{"1 > 1", false},
|
||||
{"1 == 1", true},
|
||||
{"1 != 1", false},
|
||||
{"1 == 2", false},
|
||||
{"1 != 2", true},
|
||||
{"true == true", true},
|
||||
{"false == false", true},
|
||||
{"true == false", false},
|
||||
{"true != false", true},
|
||||
{"false != true", true},
|
||||
{"(1 < 2) == true", true},
|
||||
{"(1 < 2) == false", false},
|
||||
{"(1 > 2) == true", false},
|
||||
{"(1 > 2) == false", true},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
evaluated := testEval(tt.input)
|
||||
testBooleanObject(t, evaluated, tt.expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBangOperator(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected bool
|
||||
}{
|
||||
{"!true", false},
|
||||
{"!false", true},
|
||||
{"!5", false},
|
||||
{"!!true", true},
|
||||
{"!!false", false},
|
||||
{"!!5", true},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
evaluated := testEval(tt.input)
|
||||
testBooleanObject(t, evaluated, tt.expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestIfElseExpressions(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected interface{}
|
||||
}{
|
||||
{"if (true) { 10 }", 10},
|
||||
{"if (false) { 10 }", nil},
|
||||
{"if (1) { 10 }", 10},
|
||||
{"if (1 < 2) { 10 }", 10},
|
||||
{"if (1 > 2) { 10 }", nil},
|
||||
{"if (1 > 2) { 10 } else { 20 }", 20},
|
||||
{"if (1 < 2) { 10 } else { 20 }", 10},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
evaluated := testEval(tt.input)
|
||||
integer, ok := tt.expected.(int)
|
||||
if ok {
|
||||
testIntegerObject(t, evaluated, int64(integer))
|
||||
} else {
|
||||
testNullObject(t, evaluated)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestReturnStatements(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected int64
|
||||
}{
|
||||
{"return 10;", 10},
|
||||
{"return 10; 9;", 10},
|
||||
{"return 2 * 5; 9;", 10},
|
||||
{"9; return 2 * 5; 9;", 10},
|
||||
{"if (10 > 1) { return 10; }", 10},
|
||||
{
|
||||
`
|
||||
if (10 > 1) {
|
||||
if (10 > 1) {
|
||||
return 10;
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
`,
|
||||
10,
|
||||
},
|
||||
{
|
||||
`
|
||||
let f = fn(x) {
|
||||
return x;
|
||||
x + 10;
|
||||
};
|
||||
f(10);`,
|
||||
10,
|
||||
},
|
||||
{
|
||||
`
|
||||
let f = fn(x) {
|
||||
let result = x + 10;
|
||||
return result;
|
||||
return 10;
|
||||
};
|
||||
f(10);`,
|
||||
20,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
evaluated := testEval(tt.input)
|
||||
testIntegerObject(t, evaluated, tt.expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestErrorHandling(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expectedMessage string
|
||||
}{
|
||||
{
|
||||
"5 + true;",
|
||||
"type mismatch: INTEGER + BOOLEAN",
|
||||
},
|
||||
{
|
||||
"5 + true; 5;",
|
||||
"type mismatch: INTEGER + BOOLEAN",
|
||||
},
|
||||
{
|
||||
"-true",
|
||||
"unknown operator: -BOOLEAN",
|
||||
},
|
||||
{
|
||||
"true + false;",
|
||||
"unknown operator: BOOLEAN + BOOLEAN",
|
||||
},
|
||||
{
|
||||
"true + false + true + false;",
|
||||
"unknown operator: BOOLEAN + BOOLEAN",
|
||||
},
|
||||
{
|
||||
"5; true + false; 5",
|
||||
"unknown operator: BOOLEAN + BOOLEAN",
|
||||
},
|
||||
{
|
||||
`"Hello" - "World"`,
|
||||
"unknown operator: STRING - STRING",
|
||||
},
|
||||
{
|
||||
"if (10 > 1) { true + false; }",
|
||||
"unknown operator: BOOLEAN + BOOLEAN",
|
||||
},
|
||||
{
|
||||
`
|
||||
if (10 > 1) {
|
||||
if (10 > 1) {
|
||||
return true + false;
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
`,
|
||||
"unknown operator: BOOLEAN + BOOLEAN",
|
||||
},
|
||||
{
|
||||
"foobar",
|
||||
"identifier not found: foobar",
|
||||
},
|
||||
{
|
||||
`{"name": "Monkey"}[fn(x) { x }];`,
|
||||
"unusable as hash key: FUNCTION",
|
||||
},
|
||||
{
|
||||
`999[1]`,
|
||||
"index operator not supported: INTEGER",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
evaluated := testEval(tt.input)
|
||||
|
||||
errObj, ok := evaluated.(*object.Error)
|
||||
if !ok {
|
||||
t.Errorf("no error object returned. got=%T(%+v)",
|
||||
evaluated, evaluated)
|
||||
continue
|
||||
}
|
||||
|
||||
if errObj.Message != tt.expectedMessage {
|
||||
t.Errorf("wrong error message. expected=%q, got=%q",
|
||||
tt.expectedMessage, errObj.Message)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestLetStatements(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected int64
|
||||
}{
|
||||
{"let a = 5; a;", 5},
|
||||
{"let a = 5 * 5; a;", 25},
|
||||
{"let a = 5; let b = a; b;", 5},
|
||||
{"let a = 5; let b = a; let c = a + b + 5; c;", 15},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
testIntegerObject(t, testEval(tt.input), tt.expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFunctionObject(t *testing.T) {
|
||||
input := "fn(x) { x + 2; };"
|
||||
|
||||
evaluated := testEval(input)
|
||||
fn, ok := evaluated.(*object.Function)
|
||||
if !ok {
|
||||
t.Fatalf("object is not Function. got=%T (%+v)", evaluated, evaluated)
|
||||
}
|
||||
|
||||
if len(fn.Parameters) != 1 {
|
||||
t.Fatalf("function has wrong parameters. Parameters=%+v",
|
||||
fn.Parameters)
|
||||
}
|
||||
|
||||
if fn.Parameters[0].String() != "x" {
|
||||
t.Fatalf("parameter is not 'x'. got=%q", fn.Parameters[0])
|
||||
}
|
||||
|
||||
expectedBody := "(x + 2)"
|
||||
|
||||
if fn.Body.String() != expectedBody {
|
||||
t.Fatalf("body is not %q. got=%q", expectedBody, fn.Body.String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestFunctionApplication(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected int64
|
||||
}{
|
||||
{"let identity = fn(x) { x; }; identity(5);", 5},
|
||||
{"let identity = fn(x) { return x; }; identity(5);", 5},
|
||||
{"let double = fn(x) { x * 2; }; double(5);", 10},
|
||||
{"let add = fn(x, y) { x + y; }; add(5, 5);", 10},
|
||||
{"let add = fn(x, y) { x + y; }; add(5 + 5, add(5, 5));", 20},
|
||||
{"fn(x) { x; }(5)", 5},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
testIntegerObject(t, testEval(tt.input), tt.expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnclosingEnvironments(t *testing.T) {
|
||||
input := `
|
||||
let first = 10;
|
||||
let second = 10;
|
||||
let third = 10;
|
||||
|
||||
let ourFunction = fn(first) {
|
||||
let second = 20;
|
||||
|
||||
first + second + third;
|
||||
};
|
||||
|
||||
ourFunction(20) + first + second;`
|
||||
|
||||
testIntegerObject(t, testEval(input), 70)
|
||||
}
|
||||
|
||||
func TestClosures(t *testing.T) {
|
||||
input := `
|
||||
let newAdder = fn(x) {
|
||||
fn(y) { x + y };
|
||||
};
|
||||
|
||||
let addTwo = newAdder(2);
|
||||
addTwo(2);`
|
||||
|
||||
testIntegerObject(t, testEval(input), 4)
|
||||
}
|
||||
|
||||
func TestStringLiteral(t *testing.T) {
|
||||
input := `"Hello World!"`
|
||||
|
||||
evaluated := testEval(input)
|
||||
str, ok := evaluated.(*object.String)
|
||||
if !ok {
|
||||
t.Fatalf("object is not String. got=%T (%+v)", evaluated, evaluated)
|
||||
}
|
||||
|
||||
if str.Value != "Hello World!" {
|
||||
t.Errorf("String has wrong value. got=%q", str.Value)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStringConcatenation(t *testing.T) {
|
||||
input := `"Hello" + " " + "World!"`
|
||||
|
||||
evaluated := testEval(input)
|
||||
str, ok := evaluated.(*object.String)
|
||||
if !ok {
|
||||
t.Fatalf("object is not String. got=%T (%+v)", evaluated, evaluated)
|
||||
}
|
||||
|
||||
if str.Value != "Hello World!" {
|
||||
t.Errorf("String has wrong value. got=%q", str.Value)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuiltinFunctions(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected interface{}
|
||||
}{
|
||||
{`len("")`, 0},
|
||||
{`len("four")`, 4},
|
||||
{`len("hello world")`, 11},
|
||||
{`len(1)`, "argument to `len` not supported, got INTEGER"},
|
||||
{`len("one", "two")`, "wrong number of arguments. got=2, want=1"},
|
||||
{`len([1, 2, 3])`, 3},
|
||||
{`len([])`, 0},
|
||||
{`puts("hello", "world!")`, nil},
|
||||
{`first([1, 2, 3])`, 1},
|
||||
{`first([])`, nil},
|
||||
{`first(1)`, "argument to `first` must be ARRAY, got INTEGER"},
|
||||
{`last([1, 2, 3])`, 3},
|
||||
{`last([])`, nil},
|
||||
{`last(1)`, "argument to `last` must be ARRAY, got INTEGER"},
|
||||
{`rest([1, 2, 3])`, []int{2, 3}},
|
||||
{`rest([])`, nil},
|
||||
{`push([], 1)`, []int{1}},
|
||||
{`push(1, 1)`, "argument to `push` must be ARRAY, got INTEGER"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
evaluated := testEval(tt.input)
|
||||
|
||||
switch expected := tt.expected.(type) {
|
||||
case int:
|
||||
testIntegerObject(t, evaluated, int64(expected))
|
||||
case nil:
|
||||
testNullObject(t, evaluated)
|
||||
case string:
|
||||
errObj, ok := evaluated.(*object.Error)
|
||||
if !ok {
|
||||
t.Errorf("object is not Error. got=%T (%+v)",
|
||||
evaluated, evaluated)
|
||||
continue
|
||||
}
|
||||
if errObj.Message != expected {
|
||||
t.Errorf("wrong error message. expected=%q, got=%q",
|
||||
expected, errObj.Message)
|
||||
}
|
||||
case []int:
|
||||
array, ok := evaluated.(*object.Array)
|
||||
if !ok {
|
||||
t.Errorf("obj not Array. got=%T (%+v)", evaluated, evaluated)
|
||||
continue
|
||||
}
|
||||
|
||||
if len(array.Elements) != len(expected) {
|
||||
t.Errorf("wrong num of elements. want=%d, got=%d",
|
||||
len(expected), len(array.Elements))
|
||||
continue
|
||||
}
|
||||
|
||||
for i, expectedElem := range expected {
|
||||
testIntegerObject(t, array.Elements[i], int64(expectedElem))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestArrayLiterals(t *testing.T) {
|
||||
input := "[1, 2 * 2, 3 + 3]"
|
||||
|
||||
evaluated := testEval(input)
|
||||
result, ok := evaluated.(*object.Array)
|
||||
if !ok {
|
||||
t.Fatalf("object is not Array. got=%T (%+v)", evaluated, evaluated)
|
||||
}
|
||||
|
||||
if len(result.Elements) != 3 {
|
||||
t.Fatalf("array has wrong num of elements. got=%d",
|
||||
len(result.Elements))
|
||||
}
|
||||
|
||||
testIntegerObject(t, result.Elements[0], 1)
|
||||
testIntegerObject(t, result.Elements[1], 4)
|
||||
testIntegerObject(t, result.Elements[2], 6)
|
||||
}
|
||||
|
||||
func TestArrayIndexExpressions(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected interface{}
|
||||
}{
|
||||
{
|
||||
"[1, 2, 3][0]",
|
||||
1,
|
||||
},
|
||||
{
|
||||
"[1, 2, 3][1]",
|
||||
2,
|
||||
},
|
||||
{
|
||||
"[1, 2, 3][2]",
|
||||
3,
|
||||
},
|
||||
{
|
||||
"let i = 0; [1][i];",
|
||||
1,
|
||||
},
|
||||
{
|
||||
"[1, 2, 3][1 + 1];",
|
||||
3,
|
||||
},
|
||||
{
|
||||
"let myArray = [1, 2, 3]; myArray[2];",
|
||||
3,
|
||||
},
|
||||
{
|
||||
"let myArray = [1, 2, 3]; myArray[0] + myArray[1] + myArray[2];",
|
||||
6,
|
||||
},
|
||||
{
|
||||
"let myArray = [1, 2, 3]; let i = myArray[0]; myArray[i]",
|
||||
2,
|
||||
},
|
||||
{
|
||||
"[1, 2, 3][3]",
|
||||
nil,
|
||||
},
|
||||
{
|
||||
"[1, 2, 3][-1]",
|
||||
nil,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
evaluated := testEval(tt.input)
|
||||
integer, ok := tt.expected.(int)
|
||||
if ok {
|
||||
testIntegerObject(t, evaluated, int64(integer))
|
||||
} else {
|
||||
testNullObject(t, evaluated)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestHashLiterals(t *testing.T) {
|
||||
input := `let two = "two";
|
||||
{
|
||||
"one": 10 - 9,
|
||||
two: 1 + 1,
|
||||
"thr" + "ee": 6 / 2,
|
||||
4: 4,
|
||||
true: 5,
|
||||
false: 6
|
||||
}`
|
||||
|
||||
evaluated := testEval(input)
|
||||
result, ok := evaluated.(*object.Hash)
|
||||
if !ok {
|
||||
t.Fatalf("Eval didn't return Hash. got=%T (%+v)", evaluated, evaluated)
|
||||
}
|
||||
|
||||
expected := map[object.HashKey]int64{
|
||||
(&object.String{Value: "one"}).HashKey(): 1,
|
||||
(&object.String{Value: "two"}).HashKey(): 2,
|
||||
(&object.String{Value: "three"}).HashKey(): 3,
|
||||
(&object.Integer{Value: 4}).HashKey(): 4,
|
||||
_TRUE.HashKey(): 5,
|
||||
_FALSE.HashKey(): 6,
|
||||
}
|
||||
|
||||
if len(result.Pairs) != len(expected) {
|
||||
t.Fatalf("Hash has wrong num of pairs. got=%d", len(result.Pairs))
|
||||
}
|
||||
|
||||
for expectedKey, expectedValue := range expected {
|
||||
pair, ok := result.Pairs[expectedKey]
|
||||
if !ok {
|
||||
t.Errorf("no pair for given key in Pairs")
|
||||
}
|
||||
|
||||
testIntegerObject(t, pair.Value, expectedValue)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHashIndexExpressions(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected interface{}
|
||||
}{
|
||||
{
|
||||
`{"foo": 5}["foo"]`,
|
||||
5,
|
||||
},
|
||||
{
|
||||
`{"foo": 5}["bar"]`,
|
||||
nil,
|
||||
},
|
||||
{
|
||||
`let key = "foo"; {"foo": 5}[key]`,
|
||||
5,
|
||||
},
|
||||
{
|
||||
`{}["foo"]`,
|
||||
nil,
|
||||
},
|
||||
{
|
||||
`{5: 5}[5]`,
|
||||
5,
|
||||
},
|
||||
{
|
||||
`{true: 5}[true]`,
|
||||
5,
|
||||
},
|
||||
{
|
||||
`{false: 5}[false]`,
|
||||
5,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
evaluated := testEval(tt.input)
|
||||
integer, ok := tt.expected.(int)
|
||||
if ok {
|
||||
testIntegerObject(t, evaluated, int64(integer))
|
||||
} else {
|
||||
testNullObject(t, evaluated)
|
||||
}
|
||||
}
|
||||
}
|
||||
func testEval(input string) object.Object {
|
||||
l := lexer.New(input)
|
||||
p := parser.New(l)
|
||||
program := p.ParseProgram()
|
||||
env := object.NewEnvironment()
|
||||
|
||||
return Eval(program, env)
|
||||
}
|
||||
|
||||
func testIntegerObject(t *testing.T, obj object.Object, expected int64) bool {
|
||||
result, ok := obj.(*object.Integer)
|
||||
if !ok {
|
||||
t.Errorf("object is not Integer. got=%T (%+v)", obj, obj)
|
||||
return false
|
||||
}
|
||||
if result.Value != expected {
|
||||
t.Errorf("object has wrong value. got=%d, want=%d",
|
||||
result.Value, expected)
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func testBooleanObject(t *testing.T, obj object.Object, expected bool) bool {
|
||||
result, ok := obj.(*object.Boolean)
|
||||
if !ok {
|
||||
t.Errorf("object is not Boolean. got=%T (%+v)", obj, obj)
|
||||
return false
|
||||
}
|
||||
if result.Value != expected {
|
||||
t.Errorf("object has wrong value. got=%t, want=%t",
|
||||
result.Value, expected)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func testNullObject(t *testing.T, obj object.Object) bool {
|
||||
if obj != _NULL {
|
||||
t.Errorf("object is not NULL. got=%T (%+v)", obj, obj)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
pkg/lexer/lexer.go (new file, 160 lines added)
@@ -0,0 +1,160 @@
package lexer
|
||||
|
||||
import "code.jmug.me/jmug/interpreter-in-go/pkg/token"
|
||||
|
||||
type Lexer struct {
|
||||
input string
|
||||
position int
|
||||
readPosition int
|
||||
ch byte
|
||||
}
|
||||
|
||||
func New(input string) *Lexer {
|
||||
l := &Lexer{input: input}
|
||||
l.readChar()
|
||||
return l
|
||||
}
|
||||
|
||||
func (l *Lexer) NextToken() token.Token {
|
||||
l.skipWhitespace()
|
||||
var tok token.Token
|
||||
switch l.ch {
|
||||
case '=':
|
||||
if l.peekChar() == '=' {
|
||||
ch := l.ch
|
||||
l.readChar()
|
||||
literal := string(ch) + string(l.ch)
|
||||
tok.Type = token.EQ
|
||||
tok.Literal = literal
|
||||
} else {
|
||||
tok = newToken(token.ASSIGN, l.ch)
|
||||
}
|
||||
case '+':
|
||||
tok = newToken(token.PLUS, l.ch)
|
||||
case '-':
|
||||
tok = newToken(token.MINUS, l.ch)
|
||||
case '!':
|
||||
if l.peekChar() == '=' {
|
||||
ch := l.ch
|
||||
l.readChar()
|
||||
literal := string(ch) + string(l.ch)
|
||||
tok.Type = token.NOT_EQ
|
||||
tok.Literal = literal
|
||||
} else {
|
||||
tok = newToken(token.BANG, l.ch)
|
||||
}
|
||||
case '*':
|
||||
tok = newToken(token.ASTERISK, l.ch)
|
||||
case '/':
|
||||
tok = newToken(token.SLASH, l.ch)
|
||||
case '<':
|
||||
tok = newToken(token.LT, l.ch)
|
||||
case '>':
|
||||
tok = newToken(token.GT, l.ch)
|
||||
case ',':
|
||||
tok = newToken(token.COMMA, l.ch)
|
||||
case ';':
|
||||
tok = newToken(token.SEMICOLON, l.ch)
|
||||
case ':':
|
||||
tok = newToken(token.COLON, l.ch)
|
||||
case '(':
|
||||
tok = newToken(token.LPAREN, l.ch)
|
||||
case ')':
|
||||
tok = newToken(token.RPAREN, l.ch)
|
||||
case '{':
|
||||
tok = newToken(token.LBRACE, l.ch)
|
||||
case '}':
|
||||
tok = newToken(token.RBRACE, l.ch)
|
||||
case '[':
|
||||
tok = newToken(token.LBRACKET, l.ch)
|
||||
case ']':
|
||||
tok = newToken(token.RBRACKET, l.ch)
|
||||
case '"':
|
||||
tok.Type = token.STRING
|
||||
tok.Literal = l.readString()
|
||||
case 0:
|
||||
tok.Literal = ""
|
||||
tok.Type = token.EOF
|
||||
default:
|
||||
if isLetter(l.ch) {
|
||||
tok.Literal = l.readIdentifier()
|
||||
tok.Type = token.LookupIdent(tok.Literal)
|
||||
// Don't let it fall through because readIdentifier calls readChar.
|
||||
return tok
|
||||
} else if isDigit(l.ch) {
|
||||
tok.Literal = l.readNumber()
|
||||
tok.Type = token.INT
|
||||
// Don't let it fall through because readNumber calls readChar.
|
||||
return tok
|
||||
} else {
|
||||
tok = newToken(token.ILLEGAL, l.ch)
|
||||
}
|
||||
}
|
||||
l.readChar()
|
||||
return tok
|
||||
}
|
||||
|
||||
func (l *Lexer) readChar() {
|
||||
if l.readPosition >= len(l.input) {
|
||||
l.ch = 0
|
||||
} else {
|
||||
l.ch = l.input[l.readPosition]
|
||||
}
|
||||
l.position = l.readPosition
|
||||
l.readPosition += 1
|
||||
}
|
||||
|
||||
func (l *Lexer) peekChar() byte {
|
||||
if l.readPosition >= len(l.input) {
|
||||
return 0
|
||||
}
|
||||
return l.input[l.readPosition]
|
||||
}
|
||||
|
||||
func (l *Lexer) readIdentifier() string {
|
||||
position := l.position
|
||||
for isLetter(l.ch) {
|
||||
l.readChar()
|
||||
}
|
||||
// Slicing until l.position instead of readPosition because the last read
|
||||
// char was not a letter.
|
||||
return l.input[position:l.position]
|
||||
}
|
||||
|
||||
func (l *Lexer) readNumber() string {
|
||||
position := l.position
|
||||
for isDigit(l.ch) {
|
||||
l.readChar()
|
||||
}
|
||||
// Slicing until l.position instead of readPosition because the last read
|
||||
// char was not a letter.
|
||||
return l.input[position:l.position]
|
||||
}
|
||||
|
||||
func (l *Lexer) readString() string {
|
||||
// Don't include the quotes in the literal.
|
||||
position := l.position + 1
|
||||
l.readChar()
|
||||
for l.ch != '"' && l.ch != 0 {
|
||||
l.readChar()
|
||||
}
|
||||
return l.input[position:l.position]
|
||||
}
|
||||
|
||||
func (l *Lexer) skipWhitespace() {
|
||||
for l.ch == ' ' || l.ch == '\t' || l.ch == '\n' || l.ch == '\r' {
|
||||
l.readChar()
|
||||
}
|
||||
}
|
||||
|
||||
func newToken(tokenType token.TokenType, ch byte) token.Token {
|
||||
return token.Token{Type: tokenType, Literal: string(ch)}
|
||||
}
|
||||
|
||||
func isLetter(ch byte) bool {
|
||||
return ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z') || ch == '_'
|
||||
}
|
||||
|
||||
func isDigit(ch byte) bool {
|
||||
return '0' <= ch && ch <= '9'
|
||||
}
|
||||
pkg/lexer/lexer_test.go (new file, 144 lines added)
@@ -0,0 +1,144 @@
package lexer
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"code.jmug.me/jmug/interpreter-in-go/pkg/token"
|
||||
)
|
||||
|
||||
func TestNextToken(t *testing.T) {
|
||||
input := `let five = 5;
|
||||
let ten = 10;
|
||||
|
||||
let add = fn(x, y) {
|
||||
x + y;
|
||||
};
|
||||
|
||||
let result = add(five, ten);
|
||||
!-/*5;
|
||||
5 < 10 > 5;
|
||||
|
||||
if (5 < 10) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
|
||||
10 == 10;
|
||||
10 != 9;
|
||||
"foobar"
|
||||
"foo bar"
|
||||
[1, 2];
|
||||
{1: 2};
|
||||
`
|
||||
|
||||
tests := []struct {
|
||||
expectedType token.TokenType
|
||||
expectedLiteral string
|
||||
}{
|
||||
{token.LET, "let"},
|
||||
{token.IDENT, "five"},
|
||||
{token.ASSIGN, "="},
|
||||
{token.INT, "5"},
|
||||
{token.SEMICOLON, ";"},
|
||||
{token.LET, "let"},
|
||||
{token.IDENT, "ten"},
|
||||
{token.ASSIGN, "="},
|
||||
{token.INT, "10"},
|
||||
{token.SEMICOLON, ";"},
|
||||
{token.LET, "let"},
|
||||
{token.IDENT, "add"},
|
||||
{token.ASSIGN, "="},
|
||||
{token.FUNCTION, "fn"},
|
||||
{token.LPAREN, "("},
|
||||
{token.IDENT, "x"},
|
||||
{token.COMMA, ","},
|
||||
{token.IDENT, "y"},
|
||||
{token.RPAREN, ")"},
|
||||
{token.LBRACE, "{"},
|
||||
{token.IDENT, "x"},
|
||||
{token.PLUS, "+"},
|
||||
{token.IDENT, "y"},
|
||||
{token.SEMICOLON, ";"},
|
||||
{token.RBRACE, "}"},
|
||||
{token.SEMICOLON, ";"},
|
||||
{token.LET, "let"},
|
||||
{token.IDENT, "result"},
|
||||
{token.ASSIGN, "="},
|
||||
{token.IDENT, "add"},
|
||||
{token.LPAREN, "("},
|
||||
{token.IDENT, "five"},
|
||||
{token.COMMA, ","},
|
||||
{token.IDENT, "ten"},
|
||||
{token.RPAREN, ")"},
|
||||
{token.SEMICOLON, ";"},
|
||||
{token.BANG, "!"},
|
||||
{token.MINUS, "-"},
|
||||
{token.SLASH, "/"},
|
||||
{token.ASTERISK, "*"},
|
||||
{token.INT, "5"},
|
||||
{token.SEMICOLON, ";"},
|
||||
{token.INT, "5"},
|
||||
{token.LT, "<"},
|
||||
{token.INT, "10"},
|
||||
{token.GT, ">"},
|
||||
{token.INT, "5"},
|
||||
{token.SEMICOLON, ";"},
|
||||
{token.IF, "if"},
|
||||
{token.LPAREN, "("},
|
||||
{token.INT, "5"},
|
||||
{token.LT, "<"},
|
||||
{token.INT, "10"},
|
||||
{token.RPAREN, ")"},
|
||||
{token.LBRACE, "{"},
|
||||
{token.RETURN, "return"},
|
||||
{token.TRUE, "true"},
|
||||
{token.SEMICOLON, ";"},
|
||||
{token.RBRACE, "}"},
|
||||
{token.ELSE, "else"},
|
||||
{token.LBRACE, "{"},
|
||||
{token.RETURN, "return"},
|
||||
{token.FALSE, "false"},
|
||||
{token.SEMICOLON, ";"},
|
||||
{token.RBRACE, "}"},
|
||||
{token.INT, "10"},
|
||||
{token.EQ, "=="},
|
||||
{token.INT, "10"},
|
||||
{token.SEMICOLON, ";"},
|
||||
{token.INT, "10"},
|
||||
{token.NOT_EQ, "!="},
|
||||
{token.INT, "9"},
|
||||
{token.SEMICOLON, ";"},
|
||||
{token.STRING, "foobar"},
|
||||
{token.STRING, "foo bar"},
|
||||
{token.LBRACKET, "["},
|
||||
{token.INT, "1"},
|
||||
{token.COMMA, ","},
|
||||
{token.INT, "2"},
|
||||
{token.RBRACKET, "]"},
|
||||
{token.SEMICOLON, ";"},
|
||||
{token.LBRACE, "{"},
|
||||
{token.INT, "1"},
|
||||
{token.COLON, ":"},
|
||||
{token.INT, "2"},
|
||||
{token.RBRACE, "}"},
|
||||
{token.SEMICOLON, ";"},
|
||||
{token.EOF, ""},
|
||||
}
|
||||
|
||||
l := New(input)
|
||||
|
||||
for i, tt := range tests {
|
||||
tok := l.NextToken()
|
||||
|
||||
if tok.Type != tt.expectedType {
|
||||
t.Fatalf("tests[%d] - tokentype wrong. expected=%q, got=%q",
|
||||
i, tt.expectedType, tok.Type)
|
||||
}
|
||||
|
||||
if tok.Literal != tt.expectedLiteral {
|
||||
t.Fatalf("tests[%d] - literal wrong. expected=%q, got=%q",
|
||||
i, tt.expectedLiteral, tok.Literal)
|
||||
}
|
||||
}
|
||||
}
|
||||
pkg/object/environment.go (new file, 30 lines added)
@@ -0,0 +1,30 @@
package object

func NewEnvironment() *Environment {
    return &Environment{store: map[string]Object{}}
}

func NewEnclosedEnvironment(outer *Environment) *Environment {
    return &Environment{
        store: map[string]Object{},
        outer: outer,
    }
}

type Environment struct {
    store map[string]Object
    outer *Environment
}

func (e *Environment) Get(name string) (Object, bool) {
    obj, ok := e.store[name]
    if !ok && e.outer != nil {
        obj, ok = e.outer.Get(name)
    }
    return obj, ok
}

func (e *Environment) Set(name string, obj Object) Object {
    e.store[name] = obj
    return obj
}
pkg/object/hash_key.go (new file, 30 lines added)
@@ -0,0 +1,30 @@
package object

import "hash/fnv"

type Hashable interface {
    HashKey() HashKey
}

type HashKey struct {
    Type  ObjectType
    Value uint64
}

func (b *Boolean) HashKey() HashKey {
    var val uint64 = 0
    if b.Value {
        val = 1
    }
    return HashKey{Type: b.Type(), Value: val}
}

func (i *Integer) HashKey() HashKey {
    return HashKey{Type: i.Type(), Value: uint64(i.Value)}
}

func (s *String) HashKey() HashKey {
    h := fnv.New64()
    h.Write([]byte(s.Value))
    return HashKey{Type: s.Type(), Value: h.Sum64()}
}
pkg/object/object.go (new file, 164 lines added)
@@ -0,0 +1,164 @@
package object
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"code.jmug.me/jmug/interpreter-in-go/pkg/ast"
|
||||
)
|
||||
|
||||
type ObjectType string
|
||||
|
||||
const (
|
||||
INTEGER_OBJ = "INTEGER"
|
||||
BOOLEAN_OBJ = "BOOLEAN"
|
||||
NULL_OBJ = "NULL"
|
||||
RETURN_VALUE_OBJ = "RETURN"
|
||||
ERROR_OBJ = "ERROR"
|
||||
FUNCTION_OBJ = "FUNCTION"
|
||||
STRING_OBJ = "STRING"
|
||||
BUILTIN_OBJ = "BUILTIN"
|
||||
ARRAY_OBJ = "ARRAY"
|
||||
HASH_OBJ = "HASH"
|
||||
)
|
||||
|
||||
type Object interface {
|
||||
Type() ObjectType
|
||||
Inspect() string
|
||||
}
|
||||
|
||||
type Integer struct {
|
||||
Value int64
|
||||
}
|
||||
|
||||
func (i *Integer) Type() ObjectType {
|
||||
return INTEGER_OBJ
|
||||
}
|
||||
func (i *Integer) Inspect() string {
|
||||
return fmt.Sprintf("%d", i.Value)
|
||||
}
|
||||
|
||||
type Boolean struct {
|
||||
Value bool
|
||||
}
|
||||
|
||||
func (b *Boolean) Type() ObjectType {
|
||||
return BOOLEAN_OBJ
|
||||
}
|
||||
func (b *Boolean) Inspect() string {
|
||||
return fmt.Sprintf("%t", b.Value)
|
||||
}
|
||||
|
||||
type Null struct{}
|
||||
|
||||
func (n *Null) Type() ObjectType {
|
||||
return NULL_OBJ
|
||||
}
|
||||
func (n *Null) Inspect() string {
|
||||
return "null"
|
||||
}
|
||||
|
||||
type ReturnValue struct {
|
||||
Value Object
|
||||
}
|
||||
|
||||
func (rv *ReturnValue) Type() ObjectType {
|
||||
return RETURN_VALUE_OBJ
|
||||
}
|
||||
func (rv *ReturnValue) Inspect() string {
|
||||
return rv.Value.Inspect()
|
||||
}
|
||||
|
||||
type Error struct {
|
||||
Message string
|
||||
}
|
||||
|
||||
func (e *Error) Type() ObjectType {
|
||||
return ERROR_OBJ
|
||||
}
|
||||
func (e *Error) Inspect() string {
|
||||
return "ERROR: " + e.Message
|
||||
}
|
||||
|
||||
type Function struct {
|
||||
Parameters []*ast.Identifier
|
||||
Body *ast.BlockStatement
|
||||
Env *Environment
|
||||
}
|
||||
|
||||
func (f *Function) Type() ObjectType {
|
||||
return FUNCTION_OBJ
|
||||
}
|
||||
func (f *Function) Inspect() string {
|
||||
var out bytes.Buffer
|
||||
params := []string{}
|
||||
for _, p := range f.Parameters {
|
||||
params = append(params, p.Value)
|
||||
}
|
||||
out.WriteString("fn")
|
||||
out.WriteString("(" + strings.Join(params, ", ") + ")")
|
||||
out.WriteString(" {\n" + f.Body.String() + "\n}")
|
||||
return out.String()
|
||||
}
|
||||
|
||||
type String struct {
|
||||
Value string
|
||||
}
|
||||
|
||||
func (s *String) Type() ObjectType {
|
||||
return STRING_OBJ
|
||||
}
|
||||
func (s *String) Inspect() string {
|
||||
return s.Value
|
||||
}
|
||||
|
||||
type BuiltinFunction func(args ...Object) Object
|
||||
type Builtin struct {
|
||||
Fn BuiltinFunction
|
||||
}
|
||||
|
||||
func (b *Builtin) Type() ObjectType {
|
||||
return BUILTIN_OBJ
|
||||
}
|
||||
func (b *Builtin) Inspect() string {
|
||||
return "builtin function"
|
||||
}
|
||||
|
||||
type Array struct {
|
||||
Elements []Object
|
||||
}
|
||||
|
||||
func (a *Array) Type() ObjectType {
|
||||
return ARRAY_OBJ
|
||||
}
|
||||
func (a *Array) Inspect() string {
|
||||
elements := []string{}
|
||||
for _, el := range a.Elements {
|
||||
elements = append(elements, el.Inspect())
|
||||
}
|
||||
return fmt.Sprintf("[%s]", strings.Join(elements, ", "))
|
||||
}
|
||||
|
||||
type HashPair struct {
|
||||
Key Object
|
||||
Value Object
|
||||
}
|
||||
|
||||
type Hash struct {
|
||||
Pairs map[HashKey]HashPair
|
||||
}
|
||||
|
||||
func (h *Hash) Type() ObjectType {
|
||||
return HASH_OBJ
|
||||
}
|
||||
func (h *Hash) Inspect() string {
|
||||
pairs := []string{}
|
||||
for _, p := range h.Pairs {
|
||||
pairs = append(
|
||||
pairs,
|
||||
fmt.Sprintf("%s: %s", p.Key.Inspect(), p.Value.Inspect()),
|
||||
)
|
||||
}
|
||||
return "{" + strings.Join(pairs, ", ") + "}"
|
||||
}
|
||||
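For context, BuiltinFunction is the hook built-in functions plug into; the commit's real built-ins presumably live in the evaluator package, which isn't shown in this hunk. The snippet below is a hypothetical "len"-style builtin written against the types above, only to show the shape, not the actual implementation:

package main

import (
	"fmt"

	"code.jmug.me/jmug/interpreter-in-go/pkg/object"
)

func main() {
	// Hypothetical builtin: one argument, returns its length or an Error.
	lengthBuiltin := &object.Builtin{Fn: func(args ...object.Object) object.Object {
		if len(args) != 1 {
			return &object.Error{Message: fmt.Sprintf("wrong number of arguments. got=%d, want=1", len(args))}
		}
		switch arg := args[0].(type) {
		case *object.String:
			return &object.Integer{Value: int64(len(arg.Value))}
		case *object.Array:
			return &object.Integer{Value: int64(len(arg.Elements))}
		default:
			return &object.Error{Message: "unsupported argument type: " + string(arg.Type())}
		}
	}}

	fmt.Println(lengthBuiltin.Inspect())                                     // builtin function
	fmt.Println(lengthBuiltin.Fn(&object.String{Value: "Monkey"}).Inspect()) // 6
}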
22
pkg/object/object_test.go
Normal file

@@ -0,0 +1,22 @@
package object

import "testing"

func TestStringHashKey(t *testing.T) {
	hello1 := &String{Value: "Hello World"}
	hello2 := &String{Value: "Hello World"}
	diff1 := &String{Value: "My name is johnny"}
	diff2 := &String{Value: "My name is johnny"}

	if hello1.HashKey() != hello2.HashKey() {
		t.Errorf("strings with same content have different hash keys")
	}

	if diff1.HashKey() != diff2.HashKey() {
		t.Errorf("strings with same content have different hash keys")
	}

	if hello1.HashKey() == diff1.HashKey() {
		t.Errorf("strings with different content have same hash keys")
	}
}
408
pkg/parser/parser.go
Normal file

@@ -0,0 +1,408 @@
package parser

import (
	"fmt"
	"strconv"

	"code.jmug.me/jmug/interpreter-in-go/pkg/ast"
	"code.jmug.me/jmug/interpreter-in-go/pkg/lexer"
	"code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

type (
	prefixParseFn func() ast.Expression
	infixParseFn  func(ast.Expression) ast.Expression
)

type Parser struct {
	l              *lexer.Lexer
	errors         []string
	curToken       token.Token
	peekToken      token.Token
	prefixParseFns map[token.TokenType]prefixParseFn
	infixParseFns  map[token.TokenType]infixParseFn
}

func New(l *lexer.Lexer) *Parser {
	p := &Parser{
		l:              l,
		errors:         []string{},
		prefixParseFns: map[token.TokenType]prefixParseFn{},
		infixParseFns:  map[token.TokenType]infixParseFn{},
	}
	// Prefix registrations
	p.registerPrefix(token.IDENT, p.parseIdentifier)
	p.registerPrefix(token.INT, p.parseIntegerLiteral)
	p.registerPrefix(token.MINUS, p.parsePrefixExpression)
	p.registerPrefix(token.BANG, p.parsePrefixExpression)
	p.registerPrefix(token.TRUE, p.parseBoolean)
	p.registerPrefix(token.FALSE, p.parseBoolean)
	p.registerPrefix(token.LPAREN, p.parseGroupedExpression)
	p.registerPrefix(token.IF, p.parseIfExpression)
	p.registerPrefix(token.FUNCTION, p.parseFunctionLiteral)
	p.registerPrefix(token.STRING, p.parseStringLiteral)
	p.registerPrefix(token.LBRACKET, p.parseArrayLiteral)
	p.registerPrefix(token.LBRACE, p.parseHashLiteral)
	// Infix registrations
	p.registerInfix(token.PLUS, p.parseInfixExpression)
	p.registerInfix(token.MINUS, p.parseInfixExpression)
	p.registerInfix(token.ASTERISK, p.parseInfixExpression)
	p.registerInfix(token.SLASH, p.parseInfixExpression)
	p.registerInfix(token.GT, p.parseInfixExpression)
	p.registerInfix(token.LT, p.parseInfixExpression)
	p.registerInfix(token.EQ, p.parseInfixExpression)
	p.registerInfix(token.NOT_EQ, p.parseInfixExpression)
	p.registerInfix(token.LPAREN, p.parseCallExpression)
	p.registerInfix(token.LBRACKET, p.parseIndexExpression)
	// TODO: figure out why this can't be done from `ParseProgram`
	p.nextToken()
	p.nextToken()
	return p
}

func (p *Parser) ParseProgram() *ast.Program {
	program := &ast.Program{}
	program.Statements = []ast.Statement{}
	for !p.curTokenIs(token.EOF) {
		stmt := p.parseStatement()
		if stmt != nil {
			program.Statements = append(program.Statements, stmt)
		}
		// NOTE: For now, this is not only eating the semicolon, it is also
		// eating any and all tokens until parseStatement finds something
		// it deems valid.
		p.nextToken()
	}
	return program
}

func (p *Parser) parseStatement() ast.Statement {
	switch p.curToken.Type {
	case token.LET:
		return p.parseLetStatement()
	case token.RETURN:
		return p.parseReturnStatement()
	}
	return p.parseExpressionStatement()
}

func (p *Parser) parseBlockStatement() *ast.BlockStatement {
	block := &ast.BlockStatement{Token: p.curToken}
	block.Statements = []ast.Statement{}
	p.nextToken()
	for !p.curTokenIs(token.RBRACE) && !p.curTokenIs(token.EOF) {
		stmt := p.parseStatement()
		if stmt != nil {
			block.Statements = append(block.Statements, stmt)
		}
		// Consume the last token in the statement.
		p.nextToken()
	}
	return block
}

func (p *Parser) parseLetStatement() ast.Statement {
	stmt := &ast.LetStatement{Token: p.curToken}
	if !p.nextTokenIfPeekIs(token.IDENT) {
		return nil
	}
	stmt.Name = &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
	if !p.nextTokenIfPeekIs(token.ASSIGN) {
		return nil
	}
	// Consume the assign.
	p.nextToken()
	stmt.Value = p.parseExpression(LOWEST)
	if p.peekTokenIs(token.SEMICOLON) {
		p.nextToken()
	}
	return stmt
}

func (p *Parser) parseReturnStatement() ast.Statement {
	stmt := &ast.ReturnStatement{Token: p.curToken}
	p.nextToken()
	stmt.ReturnValue = p.parseExpression(LOWEST)
	if p.peekTokenIs(token.SEMICOLON) {
		p.nextToken()
	}
	return stmt
}

func (p *Parser) parseExpressionStatement() ast.Statement {
	stmt := &ast.ExpressionStatement{Token: p.curToken}
	stmt.Expression = p.parseExpression(LOWEST)
	// The semicolon is optional for expression statements so they're easier
	// to type on the REPL. NOTE: It is weird that the last token parsed by
	// parseExpression does not get consumed.
	if p.peekTokenIs(token.SEMICOLON) {
		p.nextToken()
	}
	return stmt
}

func (p *Parser) parseExpression(precedence int) ast.Expression {
	// TODO: Could this be replaced with an `ok` check?
	prefix := p.prefixParseFns[p.curToken.Type]
	if prefix == nil {
		p.noPrefixParseFnError(p.curToken.Type)
		return nil
	}
	curExpr := prefix()
	for !p.peekTokenIs(token.SEMICOLON) && precedence < p.peekPrecedence() {
		infix := p.infixParseFns[p.peekToken.Type]
		if infix == nil {
			return curExpr
		}
		p.nextToken()
		curExpr = infix(curExpr)
	}
	return curExpr
}

func (p *Parser) parseIdentifier() ast.Expression {
	return &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
}

func (p *Parser) parseIntegerLiteral() ast.Expression {
	exp := &ast.IntegerLiteral{Token: p.curToken}
	literal, err := strconv.ParseInt(p.curToken.Literal, 0, 64)
	if err != nil {
		p.errors = append(p.errors, fmt.Sprintf("could not parse %q as an integer", p.curToken.Literal))
		return nil
	}
	exp.Value = literal
	return exp
}

func (p *Parser) parseBoolean() ast.Expression {
	return &ast.Boolean{Token: p.curToken, Value: p.curTokenIs(token.TRUE)}
}

func (p *Parser) parsePrefixExpression() ast.Expression {
	exp := &ast.PrefixExpression{
		Token:    p.curToken,
		Operator: p.curToken.Literal,
	}
	p.nextToken()
	exp.Right = p.parseExpression(PREFIX)
	return exp
}

func (p *Parser) parseInfixExpression(left ast.Expression) ast.Expression {
	exp := &ast.InfixExpression{
		Token:    p.curToken,
		Operator: p.curToken.Literal,
		Left:     left,
	}
	precedence := p.curPrecedence()
	p.nextToken()
	exp.Right = p.parseExpression(precedence)
	return exp
}

func (p *Parser) parseGroupedExpression() ast.Expression {
	p.nextToken()
	exp := p.parseExpression(LOWEST)
	if !p.nextTokenIfPeekIs(token.RPAREN) {
		// TODO: Would probably be good to emit an error here?
		return nil
	}
	return exp
}

func (p *Parser) parseIfExpression() ast.Expression {
	exp := &ast.IfExpression{Token: p.curToken}
	if !p.nextTokenIfPeekIs(token.LPAREN) {
		// TODO: Would be good to emit an error here.
		return nil
	}
	p.nextToken()
	exp.Condition = p.parseExpression(LOWEST)
	if !p.nextTokenIfPeekIs(token.RPAREN) {
		// TODO: Would be good to emit an error here.
		return nil
	}
	if !p.nextTokenIfPeekIs(token.LBRACE) {
		// TODO: Would be good to emit an error here.
		return nil
	}
	exp.Consequence = p.parseBlockStatement()
	if p.peekTokenIs(token.ELSE) {
		p.nextToken()
		if !p.nextTokenIfPeekIs(token.LBRACE) {
			// TODO: Would be good to emit an error here.
			return nil
		}
		exp.Alternative = p.parseBlockStatement()
	}
	// We don't consume the RBRACE because it acts as our "end of statement"
	// token, and it's consumed by ParseProgram.
	return exp
}

func (p *Parser) parseFunctionLiteral() ast.Expression {
	fn := &ast.FunctionLiteral{Token: p.curToken}
	if !p.nextTokenIfPeekIs(token.LPAREN) {
		// TODO: Would be good to emit an error here.
		return nil
	}
	fn.Parameters = p.parseFunctionParameters()
	if !p.nextTokenIfPeekIs(token.LBRACE) {
		// TODO: Would be good to emit an error here.
		return nil
	}
	fn.Body = p.parseBlockStatement()
	return fn
}

func (p *Parser) parseFunctionParameters() []*ast.Identifier {
	params := []*ast.Identifier{}
	if p.peekTokenIs(token.RPAREN) {
		p.nextToken()
		return params
	}
	// Consume the LPAREN
	p.nextToken()
	params = append(params, &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal})
	for p.peekTokenIs(token.COMMA) {
		// Consume the previous identifier.
		p.nextToken()
		// Consume the comma.
		p.nextToken()
		params = append(params, &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal})
	}
	if !p.nextTokenIfPeekIs(token.RPAREN) {
		// TODO: Would be good to emit an error here.
		return nil
	}
	return params
}

func (p *Parser) parseCallExpression(function ast.Expression) ast.Expression {
	call := &ast.CallExpression{Token: p.curToken, Function: function}
	call.Arguments = p.parseExpressionList(token.RPAREN)
	return call
}

func (p *Parser) parseExpressionList(end token.TokenType) []ast.Expression {
	args := []ast.Expression{}
	if p.peekTokenIs(end) {
		p.nextToken()
		return args
	}
	// Consume the LPAREN
	p.nextToken()
	args = append(args, p.parseExpression(LOWEST))
	for p.peekTokenIs(token.COMMA) {
		// Consume last token of the previous expression.
		p.nextToken()
		// Consume the comma.
		p.nextToken()
		args = append(args, p.parseExpression(LOWEST))
	}
	if !p.nextTokenIfPeekIs(end) {
		// TODO: Would be good to emit an error here.
		return nil
	}
	return args
}

func (p *Parser) parseStringLiteral() ast.Expression {
	return &ast.StringLiteral{Token: p.curToken, Value: p.curToken.Literal}
}

func (p *Parser) parseArrayLiteral() ast.Expression {
	array := &ast.ArrayLiteral{Token: p.curToken}
	array.Elements = p.parseExpressionList(token.RBRACKET)
	return array
}

func (p *Parser) parseIndexExpression(left ast.Expression) ast.Expression {
	ie := &ast.IndexExpression{Token: p.curToken, Left: left}
	p.nextToken()
	ie.Index = p.parseExpression(LOWEST)
	if !p.nextTokenIfPeekIs(token.RBRACKET) {
		return nil
	}
	return ie
}

func (p *Parser) parseHashLiteral() ast.Expression {
	hash := &ast.HashLiteral{
		Token: p.curToken,
		Pairs: map[ast.Expression]ast.Expression{},
	}
	for !p.peekTokenIs(token.RBRACE) {
		p.nextToken()
		k := p.parseExpression(LOWEST)
		if !p.nextTokenIfPeekIs(token.COLON) {
			return nil
		}
		p.nextToken()
		v := p.parseExpression(LOWEST)
		hash.Pairs[k] = v
		if !p.peekTokenIs(token.RBRACE) && !p.nextTokenIfPeekIs(token.COMMA) {
			break
		}
	}
	if !p.nextTokenIfPeekIs(token.RBRACE) {
		return nil
	}
	return hash
}

func (p *Parser) curTokenIs(typ token.TokenType) bool {
	return p.curToken.Type == typ
}

func (p *Parser) peekTokenIs(typ token.TokenType) bool {
	return p.peekToken.Type == typ
}

// NOTE: I'll leave the name as-is to avoid deviating from the book (maybe a
// rename at the end?), but I think `nextTokenIfPeek` would be a much better
// name for this.
func (p *Parser) nextTokenIfPeekIs(typ token.TokenType) bool {
	if p.peekTokenIs(typ) {
		p.nextToken()
		return true
	}
	p.peekError(typ)
	return false
}

func (p *Parser) nextToken() {
	p.curToken = p.peekToken
	p.peekToken = p.l.NextToken()
}

func (p *Parser) Errors() []string {
	return p.errors
}

func (p *Parser) peekError(typ token.TokenType) {
	p.errors = append(
		p.errors,
		fmt.Sprintf(
			"expected next token to be %q, got %q instead",
			typ,
			p.peekToken.Type,
		),
	)
}

func (p *Parser) noPrefixParseFnError(t token.TokenType) {
	p.errors = append(
		p.errors,
		fmt.Sprintf("no prefix parse function found for %q", t),
	)
}

func (p *Parser) registerPrefix(typ token.TokenType, fn prefixParseFn) {
	p.prefixParseFns[typ] = fn
}

func (p *Parser) registerInfix(typ token.TokenType, fn infixParseFn) {
	p.infixParseFns[typ] = fn
}
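A minimal sketch of driving the parser directly (illustrative, not part of the commit). It assumes the module path from go.mod and the usual parenthesised String() output of the AST nodes:

package main

import (
	"fmt"

	"code.jmug.me/jmug/interpreter-in-go/pkg/lexer"
	"code.jmug.me/jmug/interpreter-in-go/pkg/parser"
)

func main() {
	// A well-formed program parses cleanly; p.Errors() stays empty.
	p := parser.New(lexer.New("let x = 5; x + 1;"))
	fmt.Println(p.ParseProgram().String())

	// A malformed one doesn't panic: nextTokenIfPeekIs records a peekError,
	// the statement is dropped, and the messages end up in p.Errors().
	p = parser.New(lexer.New("let = 5;"))
	p.ParseProgram()
	fmt.Println(p.Errors())
}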
1084
pkg/parser/parser_test.go
Normal file

File diff suppressed because it is too large
44
pkg/parser/precedence.go
Normal file

@@ -0,0 +1,44 @@
package parser

import (
	"code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

const (
	_ int = iota
	LOWEST
	EQUALS      // ==
	LESSGREATER // > or <
	SUM         // +
	PRODUCT     // *
	PREFIX      // -X or !X
	CALL        // myFunction(X)
	INDEX       // array[index]
)

var precedences = map[token.TokenType]int{
	token.EQ:       EQUALS,
	token.NOT_EQ:   EQUALS,
	token.GT:       LESSGREATER,
	token.LT:       LESSGREATER,
	token.PLUS:     SUM,
	token.MINUS:    SUM,
	token.ASTERISK: PRODUCT,
	token.SLASH:    PRODUCT,
	token.LPAREN:   CALL,
	token.LBRACKET: INDEX,
}

func (p *Parser) peekPrecedence() int {
	if pr, ok := precedences[p.peekToken.Type]; ok {
		return pr
	}
	return LOWEST
}

func (p *Parser) curPrecedence() int {
	if pr, ok := precedences[p.curToken.Type]; ok {
		return pr
	}
	return LOWEST
}
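This table is what makes the `precedence < p.peekPrecedence()` loop in parseExpression group operators correctly. A quick way to see it, again as an illustrative sketch whose expected groupings assume the usual parenthesised String() output:

package main

import (
	"fmt"

	"code.jmug.me/jmug/interpreter-in-go/pkg/lexer"
	"code.jmug.me/jmug/interpreter-in-go/pkg/parser"
)

func main() {
	inputs := []string{
		"-a * b",    // PREFIX binds tighter than PRODUCT
		"a + b * c", // PRODUCT over SUM
		"a == b[0]", // INDEX over EQUALS
	}
	for _, in := range inputs {
		p := parser.New(lexer.New(in))
		fmt.Println(p.ParseProgram().String())
	}
}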
59
pkg/repl/repl.go
Normal file

@@ -0,0 +1,59 @@
package repl

import (
	"bufio"
	"fmt"
	"io"

	"code.jmug.me/jmug/interpreter-in-go/pkg/evaluator"
	"code.jmug.me/jmug/interpreter-in-go/pkg/lexer"
	"code.jmug.me/jmug/interpreter-in-go/pkg/object"
	"code.jmug.me/jmug/interpreter-in-go/pkg/parser"
)

const PROMPT = ">> "

func Start(in io.Reader, out io.Writer) {
	scanner := bufio.NewScanner(in)
	env := object.NewEnvironment()
	for {
		fmt.Fprint(out, PROMPT)
		if !scanner.Scan() {
			return
		}
		l := lexer.New(scanner.Text())
		p := parser.New(l)
		program := p.ParseProgram()
		if len(p.Errors()) != 0 {
			printParserErrors(out, p.Errors())
			continue
		}
		res := evaluator.Eval(program, env)
		if res != nil {
			io.WriteString(out, res.Inspect())
			io.WriteString(out, "\n")
		}
	}
}

const MONKEY_FACE = `            __,__
   .--.  .-"     "-.  .--.
  / .. \/  .-. .-.  \/ .. \
 | |  '|  /   Y   \  |'  | |
 | \   \  \ 0 | 0 /  /   / |
  \ '- ,\.-"""""""-./, -' /
   ''-' /_   ^ ^   _\ '-''
       |  \._   _./  |
       \   \ '~' /   /
        '._ '-=-' _.'
           '-----'
`

func printParserErrors(out io.Writer, errors []string) {
	io.WriteString(out, MONKEY_FACE)
	io.WriteString(out, "Woops! We ran into some monkey business here!\n")
	io.WriteString(out, " parser errors:\n")
	for _, msg := range errors {
		io.WriteString(out, "\t"+msg+"\n")
	}
}
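Because Start only depends on io.Reader and io.Writer, it can be exercised without a terminal. A sketch (not part of the commit), assuming the module path from go.mod:

package main

import (
	"bytes"
	"fmt"
	"strings"

	"code.jmug.me/jmug/interpreter-in-go/pkg/repl"
)

func main() {
	// Start returns once the scanner runs out of input, so a strings.Reader
	// works as a scripted, non-interactive session.
	in := strings.NewReader("let add = fn(a, b) { a + b };\nadd(2, 3)\n")
	var out bytes.Buffer

	repl.Start(in, &out)
	fmt.Print(out.String())
}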
68
pkg/token/token.go
Normal file

@@ -0,0 +1,68 @@
package token

type TokenType string

const (
	ILLEGAL = "ILLEGAL"
	EOF     = "EOF"

	// Identifiers + Literals
	IDENT  = "IDENT"
	INT    = "INT"
	STRING = "STRING"

	// Operators
	ASSIGN   = "="
	PLUS     = "+"
	MINUS    = "-"
	BANG     = "!"
	ASTERISK = "*"
	SLASH    = "/"
	LT       = "<"
	GT       = ">"
	EQ       = "=="
	NOT_EQ   = "!="

	// Delimiters
	COMMA     = ","
	SEMICOLON = ";"
	COLON     = ":"

	LPAREN   = "("
	RPAREN   = ")"
	LBRACE   = "{"
	RBRACE   = "}"
	LBRACKET = "["
	RBRACKET = "]"

	// Keywords
	FUNCTION = "FUNCTION"
	LET      = "LET"
	TRUE     = "TRUE"
	FALSE    = "FALSE"
	IF       = "IF"
	ELSE     = "ELSE"
	RETURN   = "RETURN"
)

var keywords = map[string]TokenType{
	"fn":     FUNCTION,
	"let":    LET,
	"true":   TRUE,
	"false":  FALSE,
	"if":     IF,
	"else":   ELSE,
	"return": RETURN,
}

type Token struct {
	Type    TokenType
	Literal string
}

func LookupIdent(ident string) TokenType {
	if typ, ok := keywords[ident]; ok {
		return typ
	}
	return IDENT
}
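LookupIdent is the only branching point between keywords and user-defined identifiers. A tiny sketch (not part of the commit) of how a lexer would use it, assuming the module path from go.mod:

package main

import (
	"fmt"

	"code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

func main() {
	// Keywords resolve to their dedicated token types; everything else is IDENT.
	for _, ident := range []string{"fn", "let", "foobar"} {
		tok := token.Token{Type: token.LookupIdent(ident), Literal: ident}
		fmt.Printf("%+v\n", tok)
	}
	// {Type:FUNCTION Literal:fn}
	// {Type:LET Literal:let}
	// {Type:IDENT Literal:foobar}
}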