package tokenize

import "unicode/utf8"

// Result holds the bytes and tokens as produced by the tokenizer.
type Result struct {
	api        *API
	bytesStart int // the starting point in the API.outputBytes slice for runes produced by this stack level
	bytesEnd   int // the end point in the API.outputBytes slice for runes produced by this stack level
	tokenStart int // the starting point in the API.outputTokens slice for tokens produced by this stack level
	tokenEnd   int // the end point in the API.outputTokens slice for tokens produced by this stack level
}

// Store snapshots the current API pointers into the Result, so the byte and
// token ranges for this stack level can be read back later.
func (result *Result) Store() {
	p := result.api.pointers
	result.bytesStart = p.bytesStart
	result.bytesEnd = p.bytesEnd
	result.tokenStart = p.tokenStart
	result.tokenEnd = p.tokenEnd
}

// Clear resets the byte and token ranges of the Result to zero.
func (result *Result) Clear() {
	result.bytesStart = 0
	result.bytesEnd = 0
	result.tokenStart = 0
	result.tokenEnd = 0
}

// String returns the bytes for this Result as a string.
func (result *Result) String() string {
	return string(result.api.outputBytes[result.bytesStart:result.bytesEnd])
}

// Byte returns the byte at the given offset within this Result's byte range.
func (result *Result) Byte(offset int) byte {
	return result.api.outputBytes[result.bytesStart+offset]
}

// Bytes returns the bytes for this Result.
func (result *Result) Bytes() []byte {
	return result.api.outputBytes[result.bytesStart:result.bytesEnd]
}

// Rune returns the rune that starts at the given byte offset (not rune offset)
// within this Result's byte range.
func (result *Result) Rune(offset int) rune {
	r, _ := utf8.DecodeRune(result.api.outputBytes[result.bytesStart+offset:])
	return r
}

// Runes returns the runes for this Result.
func (result *Result) Runes() []rune {
	return []rune(result.String())
}

// Token returns the token at the given offset within this Result's token range.
func (result *Result) Token(offset int) Token {
	return result.api.outputTokens[result.tokenStart+offset]
}

// Tokens returns the tokens for this Result.
func (result *Result) Tokens() []Token {
	return result.api.outputTokens[result.tokenStart:result.tokenEnd]
}
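
// A minimal usage sketch of the Result accessors. This assumes a hypothetical
// way of obtaining an *API value (the construction of API is not defined in
// this file), and is illustrative only:
//
//	result := api.Result()            // hypothetical accessor returning *Result
//	result.Store()                    // snapshot the current pointer ranges
//	s := result.String()              // all bytes in the range, as a string
//	first := result.Rune(0)           // rune starting at byte offset 0
//	for _, tok := range result.Tokens() {
//		_ = tok // process each token produced at this stack level
//	}
//	result.Clear()                    // reset the ranges for reuse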