// go-parsekit/tokenize/api_output.go
//
// (extraction metadata: 114 lines, 2.6 KiB, Go)
package tokenize
import (
"unicode/utf8"
)
// Output provides output-related functionality for the tokenize API.
//
// All methods operate on the byte and token ranges delimited by the
// API's active stack frame (bytesStart/bytesEnd and tokenStart/tokenEnd);
// data outside those ranges is ignored.
type Output struct {
api *API // the tokenize API; its stackFrame delimits the accepted ranges
tokens []Token // accepted tokens
data []byte // accepted data (only the stack frame's byte range is live)
}
// String returns the accepted data for the active stack frame as a string.
func (o Output) String() string {
	start, end := o.api.stackFrame.bytesStart, o.api.stackFrame.bytesEnd
	return string(o.data[start:end])
}
// Runes returns the accepted data for the active stack frame as a slice of runes.
func (o Output) Runes() []rune {
	start, end := o.api.stackFrame.bytesStart, o.api.stackFrame.bytesEnd
	return []rune(string(o.data[start:end]))
}
// Rune decodes and returns the single rune that starts at the given
// offset within the accepted data.
//
// NOTE(review): offset is a byte offset relative to the frame's
// bytesStart, not a rune index — confirm callers pass byte offsets.
func (o Output) Rune(offset int) rune {
	at := o.api.stackFrame.bytesStart + offset
	r, _ := utf8.DecodeRune(o.data[at:])
	return r
}
// ClearData forgets the accepted data by collapsing the active stack
// frame's byte range to zero length. The underlying bytes are left in
// place and will be overwritten by subsequent Add/Set calls.
func (o Output) ClearData() {
o.api.stackFrame.bytesEnd = o.api.stackFrame.bytesStart
}
// SetBytes replaces the accepted data with the given bytes.
//
// The receiver is a pointer so that a reallocation of the data slice
// performed by AddBytes stays visible on the caller's Output; with a
// value receiver the grown slice would be dropped on return.
func (o *Output) SetBytes(bytes ...byte) {
	o.ClearData()
	o.AddBytes(bytes...)
}
// AddBytes appends the given bytes to the accepted data.
//
// The receiver is a pointer: when the buffer must grow, the newly
// allocated slice is assigned to o.data, and with a value receiver
// that assignment would be lost when the method returns (the caller
// would keep indexing the old, too-small backing array).
func (o *Output) AddBytes(bytes ...byte) {
	start := o.api.stackFrame.bytesEnd
	newBytesEnd := start + len(bytes)
	// Grow the byte buffer capacity when needed.
	if cap(o.data) < newBytesEnd {
		newData := make([]byte, newBytesEnd*2)
		copy(newData, o.data)
		o.data = newData
	}
	// Use an explicit upper bound so the copy is not silently truncated
	// when len(o.data) < newBytesEnd <= cap(o.data).
	copy(o.data[start:newBytesEnd], bytes)
	o.api.stackFrame.bytesEnd = newBytesEnd
}
// SetRunes replaces the accepted data with the UTF-8 encoding of the
// given runes.
//
// Pointer receiver: a reallocation of the data slice performed by
// AddRunes must stay visible on the caller's Output.
func (o *Output) SetRunes(runes ...rune) {
	o.ClearData()
	o.AddRunes(runes...)
}
// AddRunes appends the UTF-8 encoding of the given runes to the
// accepted data.
//
// The receiver is a pointer: when the buffer must grow, the newly
// allocated slice is assigned to o.data, and with a value receiver
// that assignment would be lost when the method returns.
func (o *Output) AddRunes(runes ...rune) {
	encoded := string(runes) // UTF-8 byte length may exceed len(runes)
	start := o.api.stackFrame.bytesEnd
	newBytesEnd := start + len(encoded)
	// Grow the byte buffer capacity when needed.
	if cap(o.data) < newBytesEnd {
		newData := make([]byte, newBytesEnd*2)
		copy(newData, o.data)
		o.data = newData
	}
	// Explicit upper bound prevents silent truncation when
	// len(o.data) < newBytesEnd <= cap(o.data).
	copy(o.data[start:newBytesEnd], encoded)
	o.api.stackFrame.bytesEnd = newBytesEnd
}
// AddString appends the bytes of the given string to the accepted data.
//
// Pointer receiver: a reallocation performed by AddBytes must stay
// visible on the caller's Output.
func (o *Output) AddString(s string) {
	o.AddBytes([]byte(s)...)
}
// SetString replaces the accepted data with the bytes of the given
// string.
//
// SetBytes already clears the accepted data, so the extra ClearData
// call that used to precede it was redundant and has been removed.
// Pointer receiver: a reallocation performed downstream must stay
// visible on the caller's Output.
func (o *Output) SetString(s string) {
	o.SetBytes([]byte(s)...)
}
// Tokens returns the tokens that were accepted for the active stack frame.
func (o Output) Tokens() []Token {
	from, to := o.api.stackFrame.tokenStart, o.api.stackFrame.tokenEnd
	return o.tokens[from:to]
}
// Token returns the accepted token at the given offset within the
// active stack frame's token range.
func (o Output) Token(offset int) Token {
	at := o.api.stackFrame.tokenStart + offset
	return o.tokens[at]
}
// TokenValue returns the Value of the accepted token at the given
// offset within the active stack frame's token range.
func (o Output) TokenValue(offset int) interface{} {
	token := o.tokens[o.api.stackFrame.tokenStart+offset]
	return token.Value
}
// ClearTokens forgets the accepted tokens by collapsing the active
// stack frame's token range to zero length. The underlying token slots
// are left in place and will be overwritten by subsequent Add/Set calls.
func (o Output) ClearTokens() {
o.api.stackFrame.tokenEnd = o.api.stackFrame.tokenStart
}
// SetTokens replaces the accepted tokens with the given tokens.
//
// Pointer receiver: a reallocation of the token slice performed by
// AddTokens must stay visible on the caller's Output.
func (o *Output) SetTokens(tokens ...Token) {
	o.ClearTokens()
	o.AddTokens(tokens...)
}
// AddTokens appends the given tokens to the accepted tokens.
//
// The receiver is a pointer: when the token slice must grow, the newly
// allocated slice is assigned to o.tokens, and with a value receiver
// that assignment would be lost when the method returns (the caller
// would keep indexing the old, too-small backing array).
func (o *Output) AddTokens(tokens ...Token) {
	start := o.api.stackFrame.tokenEnd
	newTokenEnd := start + len(tokens)
	// Grow the token capacity when needed.
	if cap(o.tokens) < newTokenEnd {
		newTokens := make([]Token, newTokenEnd*2)
		copy(newTokens, o.tokens)
		o.tokens = newTokens
	}
	// copy with an explicit upper bound replaces the element-by-element
	// loop and is not truncated when len(o.tokens) < newTokenEnd.
	copy(o.tokens[start:newTokenEnd], tokens)
	o.api.stackFrame.tokenEnd = newTokenEnd
}