// File: go-parsekit/tokenize/tokenizer.go
package tokenize
import (
"git.makaay.nl/mauricem/go-parsekit/common"
)
// Tokenizer is the top-level configuration struct for a parser that is
// driven entirely by a single TokenHandler function.
//
// Instances are created via parsekit.NewTokenizer().
type Tokenizer struct {
	handler TokenHandler // the TokenHandler invoked by Execute
}
// NewTokenizer instantiates a new Tokenizer that wraps the provided
// TokenHandler function.
func NewTokenizer(tokenHandler TokenHandler) *Tokenizer {
	return &Tokenizer{handler: tokenHandler}
}
// Execute feeds the input to the wrapped TokenHandler function.
// For an overview of allowed inputs, take a look at the documentation
// for parsekit.reader.New().
//
// On a successful match it returns the TokenHandler's TokenHandlerResult
// and a nil error. When the handler reports a mismatch, the result is nil
// and a *common.Error describing the mismatch is returned instead.
func (t *Tokenizer) Execute(input interface{}) (*TokenHandlerResult, *common.Error) {
	api := NewTokenAPI(input)

	// Happy path: the handler matched, hand back its accumulated result.
	if t.handler(api) {
		return api.Result(), nil
	}

	// NOTE(review): the error carries a zero-value Cursor, so callers get
	// no position information for the mismatch — confirm whether the
	// TokenAPI exposes a cursor that should be reported here instead.
	return nil, &common.Error{Message: "mismatch", Cursor: common.Cursor{}}
}