Refactor to multiple packages.

Mariano Uvalle 2023-05-10 03:58:54 +00:00
parent 7cdaa49a8e
commit 02aef95bff
6 changed files with 38 additions and 36 deletions

View file

@@ -7,6 +7,7 @@ import (
 	"os"
 	lerrors "github.com/AYM1607/crafting-interpreters/golox/internal/errors"
+	"github.com/AYM1607/crafting-interpreters/golox/internal/scanner"
 )
 var ErrInvalidScriptFile = errors.New("could not read script file")
@@ -38,7 +39,7 @@ func RunFile(path string) error {
 }
 func Run(source string) {
-	s := NewScanner(source)
+	s := scanner.NewScanner(source)
 	tokens := s.ScanTokens()
 	for _, t := range tokens {
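
The runner now reaches the scanner through the new internal/scanner package instead of its own package-level NewScanner. A minimal sketch of how a caller inside the golox module could use the exported surface after this commit; only NewScanner, ScanTokens, and the import path appear in the diff, so the surrounding main function and the printing of tokens are assumptions:

package main

import (
	"fmt"

	"github.com/AYM1607/crafting-interpreters/golox/internal/scanner"
)

func main() {
	// Scan a small Lox snippet and print the resulting tokens.
	s := scanner.NewScanner("var answer = 42;")
	for _, t := range s.ScanTokens() {
		fmt.Println(t) // assumes types.Token prints something readable
	}
}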

View file

@@ -1,16 +1,17 @@
-package runner
+package scanner
 import (
 	"strconv"
 	lerrors "github.com/AYM1607/crafting-interpreters/golox/internal/errors"
+	"github.com/AYM1607/crafting-interpreters/golox/internal/types"
 )
 type Scanner struct {
 	source  string
 	// State.
-	tokens  []Token
+	tokens  []types.Token
 	start   int
 	current int
 	line    int
@@ -19,7 +20,7 @@ type Scanner struct {
 func NewScanner(source string) *Scanner {
 	return &Scanner{
 		source:  source,
-		tokens:  []Token{},
+		tokens:  []types.Token{},
 		start:   0,
 		current: 0,
@@ -27,13 +28,13 @@ func NewScanner(source string) *Scanner {
 	}
 }
-func (s *Scanner) ScanTokens() []Token {
+func (s *Scanner) ScanTokens() []types.Token {
 	for !s.isAtEnd() {
 		s.start = s.current
 		s.scanToken()
 	}
-	s.tokens = append(s.tokens, NewToken(EOF, "", nil, s.line))
+	s.tokens = append(s.tokens, types.NewToken(types.EOF, "", nil, s.line))
 	return s.tokens
 }
@@ -41,47 +42,47 @@ func (s *Scanner) scanToken() {
 	c := s.advance()
 	switch c {
 	case '(':
-		s.addToken(LPAREN)
+		s.addToken(types.LPAREN)
 	case ')':
-		s.addToken(RPAREN)
+		s.addToken(types.RPAREN)
 	case '{':
-		s.addToken(LBRACE)
+		s.addToken(types.LBRACE)
 	case '}':
-		s.addToken(RBRACE)
+		s.addToken(types.RBRACE)
 	case ',':
-		s.addToken(COMMA)
+		s.addToken(types.COMMA)
 	case '.':
-		s.addToken(DOT)
+		s.addToken(types.DOT)
 	case '-':
-		s.addToken(MINUS)
+		s.addToken(types.MINUS)
 	case '+':
-		s.addToken(PLUS)
+		s.addToken(types.PLUS)
 	case ';':
-		s.addToken(SEMI)
+		s.addToken(types.SEMI)
 	case '*':
-		s.addToken(STAR)
+		s.addToken(types.STAR)
 	case '!':
-		tok := BANG
+		tok := types.BANG
 		if s.match('=') {
-			tok = BANG_EQUAL
+			tok = types.BANG_EQUAL
 		}
 		s.addToken(tok)
 	case '=':
-		tok := EQUAL
+		tok := types.EQUAL
 		if s.match('=') {
-			tok = EQUAL_EQUAL
+			tok = types.EQUAL_EQUAL
 		}
 		s.addToken(tok)
 	case '<':
-		tok := LT
+		tok := types.LT
 		if s.match('=') {
-			tok = LTE
+			tok = types.LTE
 		}
 		s.addToken(tok)
 	case '>':
-		tok := GT
+		tok := types.GT
 		if s.match('=') {
-			tok = GTE
+			tok = types.GTE
 		}
 		s.addToken(tok)
 	case '/':
@@ -93,7 +94,7 @@ func (s *Scanner) scanToken() {
 		} else if s.match('*') {
 			s.scanInlineComment()
 		} else {
-			s.addToken(SLASH)
+			s.addToken(types.SLASH)
 		}
 	case '"':
 		s.scanString()
@@ -174,7 +175,7 @@ func (s *Scanner) scanString() {
 	// Trim enclosing quotes
 	val := s.source[s.start+1 : s.current-1]
-	s.addTokenWithLiteral(STRING, val)
+	s.addTokenWithLiteral(types.STRING, val)
 }
 func (s *Scanner) scanNumber() {
@@ -202,7 +203,7 @@ func (s *Scanner) scanNumber() {
 		64,
 	)
 	s.addTokenWithLiteral(
-		NUMBER,
+		types.NUMBER,
 		val,
 	)
 }
@@ -212,8 +213,8 @@ func (s *Scanner) scanIdentifier() {
 		s.advance()
 	}
 	l := s.source[s.start:s.current]
-	typ := IDENT
-	if kTyp, ok := KeywordTypes[l]; ok {
+	typ := types.IDENT
+	if kTyp, ok := types.KeywordTypes[l]; ok {
 		typ = kTyp
 	}
 	s.addToken(typ)
@@ -252,16 +253,16 @@ func (s *Scanner) scanInlineComment() {
 }
 // addToken produces a single token without a literal value.
-func (s *Scanner) addToken(typ TokenType) {
+func (s *Scanner) addToken(typ types.TokenType) {
 	s.addTokenWithLiteral(typ, nil)
 }
 // addTokenWithLiteral produces a single token with the given literal value.
-func (s *Scanner) addTokenWithLiteral(typ TokenType, literal interface{}) {
+func (s *Scanner) addTokenWithLiteral(typ types.TokenType, literal interface{}) {
 	lexme := s.source[s.start:s.current]
 	s.tokens = append(
 		s.tokens,
-		NewToken(typ, lexme, literal, s.line),
+		types.NewToken(typ, lexme, literal, s.line),
 	)
 }

View file

@@ -1,4 +1,4 @@
-package runner
+package scanner
 func isIdentAlphaNumeric(c byte) bool {
 	return isIdentAlpha(c) || isDigit(c)
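
Only isIdentAlphaNumeric is visible in the diff context for this file; it moves into package scanner along with its siblings. A sketch of the character-class helpers, assuming isIdentAlpha and isDigit follow the usual Lox rules (ASCII letters, digits, underscore):

package scanner

// isDigit and isIdentAlpha are assumed implementations; only
// isIdentAlphaNumeric appears in the diff above.
func isDigit(c byte) bool {
	return c >= '0' && c <= '9'
}

func isIdentAlpha(c byte) bool {
	return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_'
}

func isIdentAlphaNumeric(c byte) bool {
	return isIdentAlpha(c) || isDigit(c)
}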

View file

@@ -1,4 +1,4 @@
-package runner
+package types
 var KeywordTypes = map[string]TokenType{
 	"and": AND,

View file

@@ -1,4 +1,4 @@
-package runner
+package types
 import "fmt"

View file

@@ -1,4 +1,4 @@
-package runner
+package types
 type TokenType string
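
Only the TokenType declaration is visible for the last file. The constant names below are exactly the ones the scanner references after the refactor, so they presumably live in this file or a sibling; the underlying string values are assumptions:

package types

type TokenType string

// Token types referenced from the scanner diff above; the names are taken
// from the diff, the string values are assumed.
const (
	LPAREN      TokenType = "LPAREN"
	RPAREN      TokenType = "RPAREN"
	LBRACE      TokenType = "LBRACE"
	RBRACE      TokenType = "RBRACE"
	COMMA       TokenType = "COMMA"
	DOT         TokenType = "DOT"
	MINUS       TokenType = "MINUS"
	PLUS        TokenType = "PLUS"
	SEMI        TokenType = "SEMI"
	STAR        TokenType = "STAR"
	SLASH       TokenType = "SLASH"
	BANG        TokenType = "BANG"
	BANG_EQUAL  TokenType = "BANG_EQUAL"
	EQUAL       TokenType = "EQUAL"
	EQUAL_EQUAL TokenType = "EQUAL_EQUAL"
	LT          TokenType = "LT"
	LTE         TokenType = "LTE"
	GT          TokenType = "GT"
	GTE         TokenType = "GTE"
	STRING      TokenType = "STRING"
	NUMBER      TokenType = "NUMBER"
	IDENT       TokenType = "IDENT"
	EOF         TokenType = "EOF"
)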