go-parsekit/tokenize/token.go

package tokenize

import (
	"fmt"
)

// Token defines a lexical token as produced by tokenize.Handlers.
//
// The Type and Value fields are optional and can be filled with data
// at will.
//
// The Type field lets a tokenizer communicate to the parser what type
// of token it is handling.
//
// The Value field can be used to store any kind of data along with the
// token. One use of this can be found in the built-in token maker
// functions like MakeInt8Token(), which store an interpreted version of
// the input string in the Value field.
type Token struct {
	Type  interface{} // optional token type, can be any type that a parser author sees fit
	Value interface{} // optional token value, of any type as well
}
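
// A minimal usage sketch (not part of the original file): filling the
// optional fields by hand. The "INT8" type tag and the int8 value are
// arbitrary choices for illustration; real code would typically rely on
// token maker Handlers such as MakeInt8Token() instead.
var exampleToken = Token{Type: "INT8", Value: int8(42)}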

// String implements fmt.Stringer. The output takes the form Type(Value),
// with the Value formatting depending on its concrete type.
func (t Token) String() string {
	tokenType := ""
	if t.Type != nil {
		tokenType = fmt.Sprintf("%v", t.Type)
	}
	value := ""
	if t.Value != nil {
		switch t.Value.(type) {
		case []Token:
			// Nested tokens are rendered as Type[child child ...].
			return fmt.Sprintf("%v%v", tokenType, t.Value)
		case string:
			value = fmt.Sprintf("%q", t.Value)
		case rune:
			value = fmt.Sprintf("'%c'", t.Value)
		case bool:
			value = fmt.Sprintf("%v", t.Value)
		default:
			// Any other type is prefixed with its Go type, e.g. (int8)42.
			value = fmt.Sprintf("(%T)%v", t.Value, t.Value)
		}
	}
	return fmt.Sprintf("%v(%s)", tokenType, value)
}
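
// A hedged usage sketch, not part of the original file (a Go example
// function like this would conventionally live in a _test.go file so it
// runs under "go test"). It shows what String() produces for a few
// representative Value types; the type tags are arbitrary.
func ExampleToken_String() {
	fmt.Println(Token{Type: "WORD", Value: "hello"})    // strings are quoted
	fmt.Println(Token{Type: "CHAR", Value: 'x'})        // runes are single-quoted
	fmt.Println(Token{Type: "FLAG", Value: true})       // booleans print bare
	fmt.Println(Token{Type: "NUMBER", Value: int8(42)}) // other types get a (type) prefix
	// Output:
	// WORD("hello")
	// CHAR('x')
	// FLAG(true)
	// NUMBER((int8)42)
}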