Moving results to their own lightweight tokenize.API.Result.

Maurice Makaay 2019-07-28 22:35:33 +00:00
parent eda71f304e
commit 53ae659ef6
18 changed files with 168 additions and 73 deletions
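
A quick orientation before the per-file changes. Results no longer live in a struct with exported Tokens/Bytes slices; a Result is now a lightweight window into the tokenize.API's shared output buffers, read through accessor methods (String(), Byte(), Rune(), Token(), Tokens()). A minimal usage sketch, based on the SeparatedBy example touched below; the import path and input literal are assumptions, not part of this commit:

package main

import (
	"fmt"

	// Import path assumed from the parsekit project layout.
	"git.makaay.nl/mauricem/go-parsekit/tokenize"
)

func main() {
	var a, t = tokenize.A, tokenize.T

	// A comma-separated integer tokenizer, as in the SeparatedBy example.
	csv := t.Int("number", a.Digits).SeparatedBy(a.Comma)

	// Before this commit: r.Tokens was an exported []Token field.
	// After: r.Tokens() returns a slice view into the shared token store.
	r, _ := csv.Match("123,456,7,8,9")
	for i, token := range r.Tokens() {
		fmt.Printf("[%d] %v\n", i, token)
	}
}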

View File

@@ -77,7 +77,7 @@ var int64Token = tokenize.T.Int64(nil, bareInteger)
 func (c *simpleCalculator) number(p *parse.API) {
 	if p.Accept(int64Token) {
-		c.Result += c.op * p.Result.Tokens[0].Value.(int64)
+		c.Result += c.op * p.Result.Token(0).Value.(int64)
 		p.Handle(c.operatorOrEndOfFile)
 	} else {
 		p.Expected("integer number")

View File

@@ -98,7 +98,7 @@ func (calc *calculator) expr(p *parse.API) {
 	var A = tokenize.A
 	if p.Handle(calc.term) {
 		for p.Accept(A.Add.Or(A.Subtract)) {
-			op := p.Result.Bytes[0]
+			op := p.Result.Byte(0)
 			if !p.Handle(calc.term) {
 				return
 			}
@@ -116,7 +116,7 @@ func (calc *calculator) term(p *parse.API) {
 	var A = tokenize.A
 	if p.Handle(calc.factor) {
 		for p.Accept(A.Multiply.Or(A.Divide)) {
-			op := p.Result.Bytes[0]
+			op := p.Result.Byte(0)
 			if !p.Handle(calc.factor) {
 				return
 			}
@@ -134,7 +134,7 @@ func (calc *calculator) factor(p *parse.API) {
 	p.Skip(A.Blanks)
 	switch {
 	case p.Accept(T.Float64(nil, A.Signed(A.Decimal))):
-		value := p.Result.Tokens[0].Value.(float64)
+		value := p.Result.Token(0).Value.(float64)
 		calc.interpreter.pushValue(value)
 	case p.Skip(A.LeftParen):
 		if !p.Handle(calc.expr) {

View File

@@ -29,7 +29,7 @@ func Example_dutchPostcodeUsingTokenizer() {
 			fmt.Printf("[%d] Input: %q Error: %s\n", i, input, err)
 		} else {
 			fmt.Printf("[%d] Input: %q Output: %s Tokens:", i, input, result)
-			for _, t := range result.Tokens {
+			for _, t := range result.Tokens() {
 				fmt.Printf(" %s(%s)", t.Type, t.Value)
 			}
 			fmt.Printf("\n")

View File

@@ -17,7 +17,7 @@ import (
 // • call other parse.Handler functions, the core of recursive-descent parsing (Handle)
 type API struct {
 	tokenAPI *tokenize.API    // the tokenize.API, used for communicating with tokenize.Handler functions
-	Result   tokenize.Result  // a struct, holding the results of the last Peek() or Accept() call
+	Result   *tokenize.Result // a struct, providing access to the results of the last successful Peek() or Accept() call
 	err      error            // parse error, retrieved by Error(), using API methods is denied when set
 	stopped  bool             // a boolean set to true by Stop()
 }
@@ -32,13 +32,8 @@ type API struct {
 func (parseAPI *API) Peek(tokenHandler tokenize.Handler) bool {
 	tokenAPI := parseAPI.tokenAPI
 	snap := tokenAPI.MakeSnapshot()
-	parseAPI.Result.Tokens = nil
-	parseAPI.Result.Bytes = nil
 	ok := parseAPI.invokeTokenizeHandler("Peek", tokenHandler)
-	if ok {
-		parseAPI.Result.Tokens = tokenAPI.Output.Tokens()
-		parseAPI.Result.Bytes = tokenAPI.Output.Bytes()
-	}
+	tokenAPI.Result.Store()
 	tokenAPI.RestoreSnapshot(snap)
 	return ok
 }
@@ -54,15 +49,9 @@ func (parseAPI *API) Accept(tokenHandler tokenize.Handler) bool {
 	ok := parseAPI.invokeTokenizeHandler("Accept", tokenHandler)
 	if ok {
 		// Keep track of the results as produced by this child.
-		// TODO put in function and also in Peek() Record Cursor() / error too?
-		parseAPI.Result.Tokens = tokenAPI.Output.Tokens()
-		parseAPI.Result.Bytes = tokenAPI.Output.Bytes()
+		tokenAPI.Result.Store()
 
-		// Now the results are stored, we can flush the results.
-		// This does not empty the byte and token store, but it does move the
-		// pointers within those stores back to the start. By doing this,
-		// the stores will be reused for the upcoming calls, which saves on
-		// memory usage.
+		// Flush the output as initialization for the next token handler.
 		tokenAPI.Output.Flush()
 
 		// Also flush the input reader buffer. Accepting input means that we
@@ -75,13 +64,14 @@ func (parseAPI *API) Accept(tokenHandler tokenize.Handler) bool {
 }
 
 func (parseAPI *API) Skip(tokenHandler tokenize.Handler) bool {
 	tokenAPI := parseAPI.tokenAPI
+	tokenAPI.Output.Suspend()
 	if !parseAPI.invokeTokenizeHandler("Skip", tokenHandler) {
+		tokenAPI.Output.Resume()
 		return false
 	}
-	parseAPI.Result.Tokens = nil
-	parseAPI.Result.Bytes = nil
-	tokenAPI.Output.Flush()
+	tokenAPI.Output.Resume()
+	tokenAPI.Result.Clear()
 	tokenAPI.Input.Flush()
 	return true
 }
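
Taken together, the three entry points now treat the shared result window differently. A summary sketch, not code from this commit (demo and handler are hypothetical names):

// Sketch: how Peek/Accept/Skip treat the result window after this commit.
func demo(p *parse.API, handler tokenize.Handler) {
	if p.Peek(handler) { // window filled, input snapshot restored
		fmt.Println(p.Result.String())
	}
	if p.Accept(handler) { // window filled, input and output flushed
		fmt.Println(p.Result.Token(0))
	}
	p.Skip(handler) // output suspended, window cleared, input consumed
}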

View File

@@ -30,9 +30,10 @@ func New(startHandler Handler) Func {
 		callerPanic("New", "parsekit.parse.{name}(): {name}() called with nil input at {caller}")
 	}
 	return func(input interface{}) error {
+		tokenAPI := tokenize.NewAPI(input)
 		api := &API{
-			tokenAPI: tokenize.NewAPI(input),
-			// NOOPCHECK loopCheck: make(map[uintptr]bool),
+			tokenAPI: tokenAPI,
+			Result:   &tokenAPI.Result,
 		}
 		if api.Handle(startHandler) {
 			// Handle returned true, indicating that parsing could still continue.

View File

@@ -35,7 +35,8 @@ func ExampleNew_usingTokens() {
 	if p.Accept(c.OneOrMore(tok.Rune("RUNE", a.AnyRune))) {
 		fmt.Printf("Runes accepted: %q\n", p.Result.String())
 		fmt.Printf("Tokens:\n")
-		for i, token := range p.Result.Tokens {
+		tokens := p.Result.Tokens()
+		for i, token := range tokens {
 			fmt.Printf("[%d] %s\n", i, token)
 		}
 	}

View File

@@ -76,7 +76,8 @@ type API struct {
 	Input  Input         // access to a set of general input-related methods
 	Byte   InputByteMode // access to a set of byte-based input methods
 	Rune   InputRuneMode // access to a set of UTF8 rune-based input methods
-	Output Output        // access to a set of output-related functionality
+	Output Output        // access to a set of output-related methods
+	Result Result        // access to a set of result retrieval methods
 	outputTokens []Token // storage for accepted tokens
 	outputBytes  []byte  // storage for accepted bytes
 }
@@ -85,10 +86,10 @@ type stackFrame struct {
 	offset int // the read offset, relative to the start of the reader buffer
 	column int // the column at which the cursor is (0-indexed, relative to the start of the stack frame)
 	line   int // the line at which the cursor is (0-indexed, relative to the start of the stack frame)
-	bytesStart int // the starting point in the API.bytes slice for runes produced by this stack level
-	bytesEnd   int // the end point in the API.bytes slice for runes produced by this stack level
-	tokenStart int // the starting point in the API.tokens slice for tokens produced by this stack level
-	tokenEnd   int // the end point in the API.tokens slice for tokens produced by this stack level
+	bytesStart int // the starting point in the API.bytes slice for produced bytes
+	bytesEnd   int // the end point in the API.bytes slice for produced bytes
+	tokenStart int // the starting point in the API.tokens slice for produced tokens
+	tokenEnd   int // the end point in the API.tokens slice for produced tokens
 }
 
 // NewAPI initializes a new API struct, wrapped around the provided input.
@@ -99,9 +100,10 @@ func NewAPI(input interface{}) *API {
 		reader: read.New(input),
 	}
 	tokenAPI.Input = Input{api: tokenAPI}
-	tokenAPI.Byte = InputByteMode{api: tokenAPI}
-	tokenAPI.Rune = InputRuneMode{api: tokenAPI}
+	tokenAPI.Input.Byte = InputByteMode{api: tokenAPI}
+	tokenAPI.Input.Rune = InputRuneMode{api: tokenAPI}
 	tokenAPI.Output = Output{api: tokenAPI}
+	tokenAPI.Result = Result{api: tokenAPI}
 
 	return tokenAPI
 }

View File

@@ -25,7 +25,10 @@ func (byteMode InputByteMode) PeekMulti(offset int, count int) ([]byte, error) {
 }
 
 func (byteMode InputByteMode) Accept(b byte) {
-	byteMode.api.Output.AddByte(b)
+	a := byteMode.api
+	if a.Output.suspended == 0 {
+		byteMode.api.Output.AddByte(b)
+	}
 	byteMode.MoveCursor(b)
 }
 
@@ -41,7 +44,10 @@ func (byteMode InputByteMode) Accept(b byte) {
 // After the call, byte offset 0 for PeekByte() and PeekRune() will point at
 // the first byte after the accepted bytes.
 func (byteMode InputByteMode) AcceptMulti(bytes ...byte) {
-	byteMode.api.Output.AddBytes(bytes...)
+	a := byteMode.api
+	if a.Output.suspended == 0 {
+		a.Output.AddBytes(bytes...)
+	}
 	byteMode.MoveCursorMulti(bytes...)
 }

View File

@@ -8,6 +8,8 @@ import (
 // which is not specifically bound to a specific read mode (byte, rune).
 type Input struct {
 	api *API
+	Byte InputByteMode
+	Rune InputRuneMode
 }
 
 // Cursor returns a string that describes the current read cursor position.

View File

@@ -7,6 +7,15 @@ import (
 // Output provides output-related functionality for the tokenize API.
 type Output struct {
 	api *API
+	suspended int // whether or not the output has been suspended (i.e. value > 0)
+}
+
+func (o *Output) Suspend() {
+	o.suspended++
+}
+
+func (o *Output) Resume() {
+	o.suspended--
 }
 
 func (o Output) Bytes() []byte {
@@ -30,23 +39,30 @@ func (o Output) Rune(offset int) rune {
 
 func (o Output) Flush() {
 	a := o.api
-	a.pointers.bytesStart = 0
-	a.pointers.bytesEnd = 0
-	a.pointers.tokenStart = 0
-	a.pointers.tokenEnd = 0
+	a.pointers.bytesStart = a.pointers.bytesEnd
+	a.pointers.tokenStart = a.pointers.tokenEnd
 }
 
 func (o Output) ClearData() {
+	if o.suspended > 0 {
+		return
+	}
 	a := o.api
 	a.pointers.bytesEnd = a.pointers.bytesStart
 }
 
 func (o Output) SetBytes(bytes ...byte) {
+	if o.suspended > 0 {
+		return
+	}
 	o.ClearData()
 	o.AddBytes(bytes...)
 }
 
 func (o Output) AddByte(b byte) {
+	if o.suspended > 0 {
+		return
+	}
 	a := o.api
 	curBytesEnd := a.pointers.bytesEnd
 	a.growOutputData(curBytesEnd + 1)
@@ -55,11 +71,17 @@ func (o Output) AddByte(b byte) {
 }
 
 func (o Output) SetRunes(runes ...rune) {
+	if o.suspended > 0 {
+		return
+	}
 	o.ClearData()
 	o.AddRunes(runes...)
 }
 
 func (o Output) AddBytes(bytes ...byte) {
+	if o.suspended > 0 {
+		return
+	}
 	a := o.api
 	curBytesEnd := a.pointers.bytesEnd
 	newBytesEnd := curBytesEnd + len(bytes)
@@ -69,6 +91,9 @@ func (o Output) AddBytes(bytes ...byte) {
 }
 
 func (o Output) AddRunes(runes ...rune) {
+	if o.suspended > 0 {
+		return
+	}
 	a := o.api
 	runesAsString := string(runes)
 	newBytesEnd := a.pointers.bytesEnd + len(runesAsString)
@@ -78,10 +103,16 @@ func (o Output) AddRunes(runes ...rune) {
 }
 
 func (o Output) AddString(s string) {
+	if o.suspended > 0 {
+		return
+	}
 	o.AddBytes([]byte(s)...)
 }
 
 func (o Output) SetString(s string) {
+	if o.suspended > 0 {
+		return
+	}
 	o.ClearData()
 	o.AddBytes([]byte(s)...)
 }
@@ -102,16 +133,25 @@ func (o Output) TokenValue(offset int) interface{} {
 }
 
 func (o Output) ClearTokens() {
+	if o.suspended > 0 {
+		return
+	}
 	a := o.api
 	a.pointers.tokenEnd = a.pointers.tokenStart
 }
 
 func (o Output) SetTokens(tokens ...Token) {
+	if o.suspended > 0 {
+		return
+	}
 	o.ClearTokens()
 	o.AddTokens(tokens...)
}
 
 func (o Output) AddToken(token Token) {
+	if o.suspended > 0 {
+		return
+	}
 	a := o.api
 	tokenEnd := a.pointers.tokenEnd
 	a.growOutputTokens(tokenEnd + 1)
@@ -120,6 +160,9 @@ func (o Output) AddToken(token Token) {
 }
 
 func (o Output) InsertTokenAtStart(token Token) {
+	if o.suspended > 0 {
+		return
+	}
 	a := o.api
 	tokenEnd := a.pointers.tokenEnd
 	tokenStart := a.pointers.tokenStart
@@ -134,6 +177,9 @@ func (o Output) InsertTokenAtStart(token Token) {
 }
 
 func (o Output) AddTokens(tokens ...Token) {
+	if o.suspended > 0 {
+		return
+	}
 	a := o.api
 	a.growOutputTokens(a.pointers.tokenEnd + len(tokens))
 	for _, t := range tokens {
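
Note that suspended is a counter rather than a boolean, so nested Suspend()/Resume() pairs compose: output only becomes live again once every Suspend() has been matched by a Resume(). A sketch of the semantics (fragment; assumes the tokenize import from the example near the top, input literal arbitrary):

api := tokenize.NewAPI("input")
api.Output.Suspend()      // suspended == 1
api.Output.Suspend()      // suspended == 2 (nested)
api.Output.AddString("a") // no-op while suspended
api.Output.Resume()       // suspended == 1, still suspended
api.Output.AddString("b") // still a no-op
api.Output.Resume()       // suspended == 0, output is live again
api.Output.AddString("c") // recorded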

View File

@@ -1,11 +1,57 @@
 package tokenize
 
+import "unicode/utf8"
+
 // Result holds the bytes and tokens as produced by the tokenizer.
 type Result struct {
-	Tokens []Token
-	Bytes  []byte
+	api        *API
+	bytesStart int // the starting point in the API.bytes slice for runes produced by this stack level
+	bytesEnd   int // the end point in the API.bytes slice for runes produced by this stack level
+	tokenStart int // the starting point in the API.tokens slice for tokens produced by this stack level
+	tokenEnd   int // the end point in the API.tokens slice for tokens produced by this stack level
+}
+
+func (result *Result) Store() {
+	p := result.api.pointers
+	result.bytesStart = p.bytesStart
+	result.bytesEnd = p.bytesEnd
+	result.tokenStart = p.tokenStart
+	result.tokenEnd = p.tokenEnd
+}
+
+func (result *Result) Clear() {
+	result.bytesStart = 0
+	result.bytesEnd = 0
+	result.tokenStart = 0
+	result.tokenEnd = 0
 }
 
 func (result *Result) String() string {
-	return string(result.Bytes)
+	return string(result.api.outputBytes[result.bytesStart:result.bytesEnd])
+}
+
+func (result *Result) Byte(offset int) byte {
+	return result.api.outputBytes[result.bytesStart+offset]
+}
+
+func (result *Result) Bytes() []byte {
+	return result.api.outputBytes[result.bytesStart:result.bytesEnd]
+}
+
+func (result *Result) Rune(offset int) rune {
+	r, _ := utf8.DecodeRune(result.api.outputBytes[result.bytesStart+offset:])
+	return r
+}
+
+func (result *Result) Runes() []rune {
+	return []rune(result.String())
+}
+
+func (result *Result) Token(offset int) Token {
+	a := result.api
+	return a.outputTokens[result.tokenStart+offset]
+}
+
+func (result *Result) Tokens() []Token {
+	return result.api.outputTokens[result.tokenStart:result.tokenEnd]
 }
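
This is also why Flush() changed above: a stored Result is only a [start,end) window into the API's output stores, so Flush() now advances the start pointers to the end instead of resetting them to zero; resetting would let the next handler overwrite a window that a Result still refers to. A sketch of that interplay (fragment; hypothetical input, using only calls from this commit):

api := tokenize.NewAPI("input")
api.Output.AddString("abc")      // output window covers bytes [0,3)
api.Result.Store()               // the Result remembers the [0,3) window
api.Output.Flush()               // start pointers jump to 3; "abc" stays put
api.Output.AddString("def")      // lands in [3,6), stored window intact
fmt.Println(api.Result.String()) // prints "abc"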

View File

@@ -43,6 +43,10 @@ func (runeMode InputRuneMode) Peek(offset int) (rune, int, error) {
 // the first byte after the accepted rune.
 func (runeMode InputRuneMode) Accept(r rune) {
 	a := runeMode.api
+	if a.Output.suspended > 0 {
+		runeMode.MoveCursor(r)
+		return
+	}
 	curBytesEnd := a.pointers.bytesEnd
 	maxRequiredBytes := curBytesEnd + utf8.UTFMax
 	a.growOutputData(maxRequiredBytes)
@@ -64,10 +68,13 @@ func (runeMode InputRuneMode) Accept(r rune) {
 // the first byte after the accepted runes.
 func (runeMode InputRuneMode) AcceptMulti(runes ...rune) {
 	a := runeMode.api
+	if a.Output.suspended > 0 {
+		runeMode.MoveCursorMulti(runes...)
+		return
+	}
 	curBytesEnd := a.pointers.bytesEnd
 	maxBytes := curBytesEnd + len(runes)*utf8.UTFMax
 	a.growOutputData(maxBytes)
-
 	for _, r := range runes {
 		w := utf8.EncodeRune(a.outputBytes[curBytesEnd:], r)
 		curBytesEnd += w

View File

@@ -102,16 +102,17 @@ func AssertTokenMaker(t *testing.T, test TokenMakerT) {
 	if err != nil {
 		t.Errorf("Test %q failed with error: %s", test.Input, err)
 	} else {
-		if len(result.Tokens) != len(test.Expected) {
-			t.Errorf("Unexpected number of tokens in output:\nexpected: %d\nactual: %d", len(test.Expected), len(result.Tokens))
+		tokens := result.Tokens()
+		if len(tokens) != len(test.Expected) {
+			t.Errorf("Unexpected number of tokens in output:\nexpected: %d\nactual: %d", len(test.Expected), len(tokens))
 		}
 		for i, expected := range test.Expected {
-			actual := result.Tokens[i]
+			actual := tokens[i]
 			if expected.Type != actual.Type {
-				t.Errorf("Unexpected Type in result.Tokens[%d]:\nexpected: (%T) %s\nactual: (%T) %s", i, expected.Type, expected.Type, actual.Type, actual.Type)
+				t.Errorf("Unexpected Type in result.Tokens, idx %d:\nexpected: (%T) %s\nactual: (%T) %s", i, expected.Type, expected.Type, actual.Type, actual.Type)
 			}
 			if expected.Value != actual.Value {
-				t.Errorf("Unexpected Value in result.Tokens[%d]:\nexpected: (%T) %s\nactual: (%T) %s", i, expected.Value, expected.Value, actual.Value, actual.Value)
+				t.Errorf("Unexpected Value in result.Tokens, idx %d:\nexpected: (%T) %s\nactual: (%T) %s", i, expected.Value, expected.Value, actual.Value, actual.Value)
 			}
 		}
 	}

View File

@@ -62,7 +62,8 @@ func ExampleHandler_SeparatedBy() {
 	csv := t.Int("number", a.Digits).SeparatedBy(a.Comma)
 
 	r, _ := csv.Match("123,456,7,8,9")
-	for i, token := range r.Tokens {
+	tokens := r.Tokens()
+	for i, token := range tokens {
 		fmt.Printf("[%d] %v\n", i, token)
 	}
 	// Output:

View File

@@ -1516,16 +1516,10 @@ func MatchIPv6Net(normalize bool) Handler {
 
 // In both cases, it would match the first form.
 func ModifyDrop(handler Handler) Handler {
 	return func(tokenAPI *API) bool {
-		runeEnd := tokenAPI.pointers.bytesEnd
-		tokenEnd := tokenAPI.pointers.tokenEnd
-		if handler(tokenAPI) {
-			// We keep offset and cursor updates, but rollback any runes / tokens
-			// that were added by the handler.
-			tokenAPI.pointers.bytesEnd = runeEnd
-			tokenAPI.pointers.tokenEnd = tokenEnd
-			return true
-		}
-		return false
+		tokenAPI.Output.Suspend()
+		ok := handler(tokenAPI)
+		tokenAPI.Output.Resume()
+		return ok
 	}
 }
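
From the caller's perspective nothing changes; matching still advances the cursor while producing no output. Only the implementation moved from pointer rollback to suspend/resume. Usage sketch (variable name hypothetical):

// Matches a run of blanks but emits no bytes or tokens.
var skipBlanks = tokenize.ModifyDrop(tokenize.A.Blanks)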

View File

@@ -459,7 +459,7 @@ func TestTokenGroup_Match(t *testing.T) {
 
 	api, err := tokenizer("xxxxx")
 	AssertTrue(t, err == nil, "Tokenizer result")
-	tokens := api.Tokens
+	tokens := api.Tokens()
 	AssertEqual(t, 1, len(tokens), "Length of tokens slice")
 	contained := tokens[0].Value.([]tokenize.Token)
 	AssertEqual(t, 3, len(contained), "Length of contained tokens")
@@ -475,7 +475,7 @@ func TestTokenGroup_Mismatch(t *testing.T) {
 
 	api, err := tokenizer("12345")
 	AssertTrue(t, err == nil, "Tokenizer result")
-	tokens := api.Tokens
+	tokens := api.Tokens()
 	AssertEqual(t, 0, len(tokens), "Length of tokens slice")
 }

View File

@@ -36,10 +36,8 @@ func New(tokenHandler Handler) Func {
 			err := fmt.Errorf("mismatch at %s", tokenAPI.Input.Cursor())
 			return nil, err
 		}
-		result := &Result{
-			Bytes:  tokenAPI.Output.Bytes(),
-			Tokens: tokenAPI.Output.Tokens(),
-		}
-		return result, nil
+
+		tokenAPI.Result.Store()
+		return &tokenAPI.Result, nil
 	}
 }

View File

@@ -38,9 +38,9 @@ func ExampleNew() {
 	} {
 		// Execute returns a Result and an error, which is nil on success.
 		result, err := tokenizer(input)
 		if err == nil {
-			fmt.Printf("Result: %s\n", result.Tokens)
+			tokens := result.Tokens()
+			fmt.Printf("Result: %s\n", tokens)
 		} else {
 			fmt.Printf("Error: %s\n", err)
 		}