Scanner working for single character tokens.
This commit is contained in:
parent
3d0f3b95d5
commit
d8dd8fd49a
6 changed files with 241 additions and 0 deletions
33
golox/internal/runner/token.go
Normal file
33
golox/internal/runner/token.go
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
package runner
|
||||
|
||||
import "fmt"
|
||||
|
||||
// Token is a single lexical token produced by the scanner.
type Token struct {
	Type    TokenType   // kind of token; TokenType is declared elsewhere in this package
	Lexme   string      // raw source text of the token (NOTE(review): conventionally spelled "Lexeme"; renaming would break callers)
	Literal interface{} // literal value carried by the token, if any — presumably nil for non-literal tokens; confirm in scanner
	Line    int         // source line the token appears on — assumed 1-based; TODO confirm against the scanner
}
|
||||
|
||||
func NewToken(
|
||||
typ TokenType,
|
||||
lexme string,
|
||||
lit interface{},
|
||||
line int,
|
||||
) Token {
|
||||
return Token{
|
||||
Type: typ,
|
||||
Lexme: lexme,
|
||||
Literal: lit,
|
||||
Line: line,
|
||||
}
|
||||
}
|
||||
|
||||
func (t Token) String() string {
|
||||
return fmt.Sprintf(
|
||||
"%s %s %v",
|
||||
t.Type,
|
||||
t.Lexme,
|
||||
t.Literal,
|
||||
)
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue