Done with the book! Added hash maps and the puts builtin.
Signed-off-by: jmug <u.g.a.mariano@gmail.com>
parent fa9f450278
commit fb25a86b91
11 changed files with 373 additions and 0 deletions
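
Taken together, the changes thread hash support through every layer: the lexer emits a new token.COLON, the parser builds *ast.HashLiteral nodes, the evaluator turns them into *object.Hash values keyed by object.HashKey, and puts prints any object via Inspect(). A rough sketch of exercising the whole path from Go; the pkg/lexer, pkg/parser, and pkg/evaluator import paths and the object.NewEnvironment constructor are assumptions, since they do not appear in this diff:

package main

import (
    "fmt"

    "code.jmug.me/jmug/interpreter-in-go/pkg/evaluator" // assumed path
    "code.jmug.me/jmug/interpreter-in-go/pkg/lexer"     // assumed path
    "code.jmug.me/jmug/interpreter-in-go/pkg/object"
    "code.jmug.me/jmug/interpreter-in-go/pkg/parser" // assumed path
)

func main() {
    // Monkey source using the new hash literal, hash index, and puts builtin.
    input := `
        let book = {"title": "Writing An Interpreter In Go", "pages": 200 + 50};
        puts(book["title"]);
        book["pages"];
    `

    l := lexer.New(input)
    p := parser.New(l)
    program := p.ParseProgram()

    env := object.NewEnvironment() // assumed constructor, not part of this diff
    result := evaluator.Eval(program, env)
    if result != nil {
        fmt.Println(result.Inspect()) // the last expression, an INTEGER: 250
    }
}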

pkg/ast/hash.go (new file, 24 additions)
@@ -0,0 +1,24 @@
package ast

import (
    "strings"

    "code.jmug.me/jmug/interpreter-in-go/pkg/token"
)

type HashLiteral struct {
    Token token.Token // The "{" token
    Pairs map[Expression]Expression
}

func (hl *HashLiteral) expressionNode() {}
func (hl *HashLiteral) TokenLiteral() string {
    return hl.Token.Literal
}
func (hl *HashLiteral) String() string {
    pairs := []string{}
    for k, v := range hl.Pairs {
        pairs = append(pairs, k.String()+":"+v.String())
    }
    return "{" + strings.Join(pairs, ", ") + "}"
}

@@ -1,12 +1,21 @@
package evaluator

import (
    "fmt"
    "os"

    "code.jmug.me/jmug/interpreter-in-go/pkg/object"
)

var builtins = map[string]*object.Builtin{
    "puts": {
        Fn: func(args ...object.Object) object.Object {
            for _, arg := range args {
                fmt.Println(arg.Inspect())
            }
            return _NULL
        },
    },
    "len": {
        Fn: func(args ...object.Object) object.Object {
            if len(args) != 1 {
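
puts simply writes each argument's Inspect() output on its own line and evaluates to the interpreter's shared null object. Since builtins and _NULL are unexported, a direct call only works from inside the evaluator package; a minimal sketch of such a test (not part of this commit):

package evaluator

import (
    "testing"

    "code.jmug.me/jmug/interpreter-in-go/pkg/object"
)

// Sketch only: calls the puts builtin the way applyFunction would,
// after its arguments have already been evaluated.
func TestPutsBuiltinSketch(t *testing.T) {
    puts := builtins["puts"].Fn
    // Prints "Hello" and "42" on separate lines via fmt.Println.
    result := puts(&object.String{Value: "Hello"}, &object.Integer{Value: 42})
    if result != _NULL {
        t.Errorf("puts should return _NULL, got=%T (%+v)", result, result)
    }
}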

@@ -91,6 +91,8 @@ func Eval(node ast.Node, env *object.Environment) object.Object {
            return index
        }
        return evalIndexExpression(left, index)
    case *ast.HashLiteral:
        return evalHashLiteral(node, env)
    }
    return nil
}

@@ -251,6 +253,8 @@ func evalIndexExpression(left, index object.Object) object.Object {
    switch {
    case left.Type() == object.ARRAY_OBJ && index.Type() == object.INTEGER_OBJ:
        return evalArrayIndexExpression(left, index)
    case left.Type() == object.HASH_OBJ:
        return evalHashIndexExpression(left, index)
    default:
        return newError("index operator not supported: %s", left.Type())
    }

@@ -265,6 +269,40 @@ func evalArrayIndexExpression(arrayObj, indexObj object.Object) object.Object {
    return array[index]
}

func evalHashIndexExpression(hashObj, index object.Object) object.Object {
    hash := hashObj.(*object.Hash)
    hashable, okHash := index.(object.Hashable)
    if !okHash {
        return newError("unusable as hash key: %s", index.Type())
    }
    pair, okPair := hash.Pairs[hashable.HashKey()]
    if !okPair {
        return _NULL
    }
    return pair.Value
}

func evalHashLiteral(hash *ast.HashLiteral, env *object.Environment) object.Object {
    pairs := map[object.HashKey]object.HashPair{}
    for ke, ve := range hash.Pairs {
        k := Eval(ke, env)
        if isError(k) {
            return k
        }
        hashable, ok := k.(object.Hashable)
        if !ok {
            return newError("unusable as hash key: %s", k.Type())
        }
        v := Eval(ve, env)
        if isError(v) {
            return v
        }
        hashKey := hashable.HashKey()
        pairs[hashKey] = object.HashPair{Key: k, Value: v}
    }
    return &object.Hash{Pairs: pairs}
}

func applyFunction(fnObj object.Object, args []object.Object) object.Object {
    switch fn := fnObj.(type) {
    case *object.Function:

@@ -190,6 +190,10 @@ if (10 > 1) {
            `"Hello" - "World"`,
            "unknown operator: STRING - STRING",
        },
        {
            `{"name": "Monkey"}[fn(x) { x }];`,
            "unusable as hash key: FUNCTION",
        },
    }

    for _, tt := range tests {

@@ -406,6 +410,92 @@ func TestArrayIndexExpressions(t *testing.T) {
    }
}

func TestHashLiterals(t *testing.T) {
    input := `let two = "two";
    {
        "one": 10 - 9,
        two: 1 + 1,
        "thr" + "ee": 6 / 2,
        4: 4,
        true: 5,
        false: 6
    }`

    evaluated := testEval(input)
    result, ok := evaluated.(*object.Hash)
    if !ok {
        t.Fatalf("Eval didn't return Hash. got=%T (%+v)", evaluated, evaluated)
    }

    expected := map[object.HashKey]int64{
        (&object.String{Value: "one"}).HashKey():   1,
        (&object.String{Value: "two"}).HashKey():   2,
        (&object.String{Value: "three"}).HashKey(): 3,
        (&object.Integer{Value: 4}).HashKey():      4,
        _TRUE.HashKey():                            5,
        _FALSE.HashKey():                           6,
    }

    if len(result.Pairs) != len(expected) {
        t.Fatalf("Hash has wrong num of pairs. got=%d", len(result.Pairs))
    }

    for expectedKey, expectedValue := range expected {
        pair, ok := result.Pairs[expectedKey]
        if !ok {
            t.Errorf("no pair for given key in Pairs")
        }

        testIntegerObject(t, pair.Value, expectedValue)
    }
}

func TestHashIndexExpressions(t *testing.T) {
    tests := []struct {
        input    string
        expected interface{}
    }{
        {
            `{"foo": 5}["foo"]`,
            5,
        },
        {
            `{"foo": 5}["bar"]`,
            nil,
        },
        {
            `let key = "foo"; {"foo": 5}[key]`,
            5,
        },
        {
            `{}["foo"]`,
            nil,
        },
        {
            `{5: 5}[5]`,
            5,
        },
        {
            `{true: 5}[true]`,
            5,
        },
        {
            `{false: 5}[false]`,
            5,
        },
    }

    for _, tt := range tests {
        evaluated := testEval(tt.input)
        integer, ok := tt.expected.(int)
        if ok {
            testIntegerObject(t, evaluated, int64(integer))
        } else {
            testNullObject(t, evaluated)
        }
    }
}

func testNullObject(t *testing.T, obj object.Object) bool {
    if obj != _NULL {
        t.Errorf("object is not NULL. got=%T (%+v)", obj, obj)

@@ -55,6 +55,8 @@ func (l *Lexer) NextToken() token.Token {
        tok = newToken(token.COMMA, l.ch)
    case ';':
        tok = newToken(token.SEMICOLON, l.ch)
    case ':':
        tok = newToken(token.COLON, l.ch)
    case '(':
        tok = newToken(token.LPAREN, l.ch)
    case ')':

@@ -29,6 +29,7 @@ if (5 < 10) {
"foobar"
"foo bar"
[1, 2];
{1: 2};
`

    tests := []struct {

@@ -116,6 +117,12 @@ if (5 < 10) {
        {token.INT, "2"},
        {token.RBRACKET, "]"},
        {token.SEMICOLON, ";"},
        {token.LBRACE, "{"},
        {token.INT, "1"},
        {token.COLON, ":"},
        {token.INT, "2"},
        {token.RBRACE, "}"},
        {token.SEMICOLON, ";"},
        {token.EOF, ""},
    }


pkg/object/hash_key.go (new file, 30 additions)
@@ -0,0 +1,30 @@
package object

import "hash/fnv"

type Hashable interface {
    HashKey() HashKey
}

type HashKey struct {
    Type  ObjectType
    Value uint64
}

func (b *Boolean) HashKey() HashKey {
    var val uint64 = 0
    if b.Value {
        val = 1
    }
    return HashKey{Type: b.Type(), Value: val}
}

func (i *Integer) HashKey() HashKey {
    return HashKey{Type: i.Type(), Value: uint64(i.Value)}
}

func (s *String) HashKey() HashKey {
    h := fnv.New64()
    h.Write([]byte(s.Value))
    return HashKey{Type: s.Type(), Value: h.Sum64()}
}
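
HashKey is a plain comparable struct (an ObjectType plus a uint64), so two objects of the same type with the same content collapse to the same Go map key; that is what lets Hash.Pairs be a native map further down in object.go. A small sketch along the lines of the new object_test.go (not part of this commit):

package object

import "testing"

// Sketch only: equal content yields equal HashKeys, so a lookup built from
// one *String finds a pair stored under another *String with the same value.
func TestHashKeyAsMapKeySketch(t *testing.T) {
    k1 := (&String{Value: "title"}).HashKey()
    k2 := (&String{Value: "title"}).HashKey()
    if k1 != k2 {
        t.Fatalf("equal strings should produce equal hash keys")
    }

    pairs := map[HashKey]HashPair{
        k1: {Key: &String{Value: "title"}, Value: &Integer{Value: 1}},
    }
    if _, ok := pairs[k2]; !ok {
        t.Errorf("lookup with an equal key should find the stored pair")
    }
}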

@@ -20,6 +20,7 @@ const (
    STRING_OBJ  = "STRING"
    BUILTIN_OBJ = "BUILTIN"
    ARRAY_OBJ   = "ARRAY"
    HASH_OBJ    = "HASH"
)

type Object interface {

@@ -138,3 +139,26 @@ func (a *Array) Inspect() string {
    }
    return fmt.Sprintf("[%s]", strings.Join(elements, ", "))
}

type HashPair struct {
    Key   Object
    Value Object
}

type Hash struct {
    Pairs map[HashKey]HashPair
}

func (h *Hash) Type() ObjectType {
    return HASH_OBJ
}
func (h *Hash) Inspect() string {
    pairs := []string{}
    for _, p := range h.Pairs {
        pairs = append(
            pairs,
            fmt.Sprintf("%s: %s", p.Key.Inspect(), p.Value.Inspect()),
        )
    }
    return "{" + strings.Join(pairs, ", ") + "}"
}

pkg/object/object_test.go (new file, 22 additions)
@@ -0,0 +1,22 @@
package object

import "testing"

func TestStringHashKey(t *testing.T) {
    hello1 := &String{Value: "Hello World"}
    hello2 := &String{Value: "Hello World"}
    diff1 := &String{Value: "My name is johnny"}
    diff2 := &String{Value: "My name is johnny"}

    if hello1.HashKey() != hello2.HashKey() {
        t.Errorf("strings with same content have different hash keys")
    }

    if diff1.HashKey() != diff2.HashKey() {
        t.Errorf("strings with same content have different hash keys")
    }

    if hello1.HashKey() == diff1.HashKey() {
        t.Errorf("strings with different content have same hash keys")
    }
}

@@ -42,6 +42,7 @@ func New(l *lexer.Lexer) *Parser {
    p.registerPrefix(token.FUNCTION, p.parseFunctionLiteral)
    p.registerPrefix(token.STRING, p.parseStringLiteral)
    p.registerPrefix(token.LBRACKET, p.parseArrayLiteral)
    p.registerPrefix(token.LBRACE, p.parseHashLiteral)
    // Infix registrations
    p.registerInfix(token.PLUS, p.parseInfixExpression)
    p.registerInfix(token.MINUS, p.parseInfixExpression)

@@ -327,6 +328,30 @@ func (p *Parser) parseIndexExpression(left ast.Expression) ast.Expression {
    return ie
}

func (p *Parser) parseHashLiteral() ast.Expression {
    hash := &ast.HashLiteral{
        Token: p.curToken,
        Pairs: map[ast.Expression]ast.Expression{},
    }
    for !p.peekTokenIs(token.RBRACE) {
        p.nextToken()
        k := p.parseExpression(LOWEST)
        if !p.nextTokenIfPeekIs(token.COLON) {
            return nil
        }
        p.nextToken()
        v := p.parseExpression(LOWEST)
        hash.Pairs[k] = v
        if !p.peekTokenIs(token.RBRACE) && !p.nextTokenIfPeekIs(token.COMMA) {
            break
        }
    }
    if !p.nextTokenIfPeekIs(token.RBRACE) {
        return nil
    }
    return hash
}

func (p *Parser) curTokenIs(typ token.TokenType) bool {
    return p.curToken.Type == typ
}
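
parseHashLiteral produces an *ast.HashLiteral whose String() method (added in pkg/ast/hash.go) joins each pair as key:value. A quick sketch of printing that AST; the pkg/lexer and pkg/parser import paths and Program.String() are assumptions, and since Pairs is a Go map the pair order in the output can vary:

package main

import (
    "fmt"

    "code.jmug.me/jmug/interpreter-in-go/pkg/lexer"  // assumed path
    "code.jmug.me/jmug/interpreter-in-go/pkg/parser" // assumed path
)

func main() {
    l := lexer.New(`{"one": 0 + 1, "two": 10 - 8};`)
    p := parser.New(l)
    program := p.ParseProgram()
    // Prints something like {one:(0 + 1), two:(10 - 8)}; the exact pair
    // order depends on Go map iteration over HashLiteral.Pairs.
    fmt.Println(program.String())
}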

@@ -701,6 +701,108 @@ func TestParsingIndexExpressions(t *testing.T) {
    }
}

func TestParsingHashLiteralsStringKeys(t *testing.T) {
    input := `{"one": 1, "two": 2, "three": 3}`

    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    checkParserErrors(t, p)

    stmt := program.Statements[0].(*ast.ExpressionStatement)
    hash, ok := stmt.Expression.(*ast.HashLiteral)
    if !ok {
        t.Fatalf("exp is not ast.HashLiteral. got=%T", stmt.Expression)
    }

    if len(hash.Pairs) != 3 {
        t.Errorf("hash.Pairs has wrong length. got=%d", len(hash.Pairs))
    }

    expected := map[string]int64{
        "one":   1,
        "two":   2,
        "three": 3,
    }

    for key, value := range hash.Pairs {
        literal, ok := key.(*ast.StringLiteral)
        if !ok {
            t.Errorf("key is not ast.StringLiteral. got=%T", key)
        }

        expectedValue := expected[literal.String()]

        testIntegerLiteral(t, value, expectedValue)
    }
}

func TestParsingEmptyHashLiteral(t *testing.T) {
    input := "{}"

    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    checkParserErrors(t, p)

    stmt := program.Statements[0].(*ast.ExpressionStatement)
    hash, ok := stmt.Expression.(*ast.HashLiteral)
    if !ok {
        t.Fatalf("exp is not ast.HashLiteral. got=%T", stmt.Expression)
    }

    if len(hash.Pairs) != 0 {
        t.Errorf("hash.Pairs has wrong length. got=%d", len(hash.Pairs))
    }
}

func TestParsingHashLiteralsWithExpressions(t *testing.T) {
    input := `{"one": 0 + 1, "two": 10 - 8, "three": 15 / 5}`

    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    checkParserErrors(t, p)

    stmt := program.Statements[0].(*ast.ExpressionStatement)
    hash, ok := stmt.Expression.(*ast.HashLiteral)
    if !ok {
        t.Fatalf("exp is not ast.HashLiteral. got=%T", stmt.Expression)
    }

    if len(hash.Pairs) != 3 {
        t.Errorf("hash.Pairs has wrong length. got=%d", len(hash.Pairs))
    }

    tests := map[string]func(ast.Expression){
        "one": func(e ast.Expression) {
            testInfixExpression(t, e, 0, "+", 1)
        },
        "two": func(e ast.Expression) {
            testInfixExpression(t, e, 10, "-", 8)
        },
        "three": func(e ast.Expression) {
            testInfixExpression(t, e, 15, "/", 5)
        },
    }

    for key, value := range hash.Pairs {
        literal, ok := key.(*ast.StringLiteral)
        if !ok {
            t.Errorf("key is not ast.StringLiteral. got=%T", key)
            continue
        }

        testFunc, ok := tests[literal.String()]
        if !ok {
            t.Errorf("No test function for key %q found", literal.String())
            continue
        }

        testFunc(value)
    }
}

func testIdentifier(t *testing.T, exp ast.Expression, value string) bool {
    ident, ok := exp.(*ast.Identifier)
    if !ok {