Moved input-related functions to their own API.Input struct.

parent 9d98c9dff7
commit 7d2d8dbed3
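In practical terms, call sites change from invoking peek/skip/accept operations directly on the tokenize API to going through its new Input field, while output-related calls keep using the existing Output field. A minimal before/after sketch of a handler function (the parsekit import path is assumed, and matchLowercase is a made-up example, not a function from this commit):

```go
package example

import (
	"unicode"

	"git.makaay.nl/mauricem/go-parsekit/tokenize" // import path assumed
)

// matchLowercase is a hypothetical Handler that accepts a single lowercase rune.
func matchLowercase(t *tokenize.API) bool {
	// Before this commit: r, _, err := t.PeekRune(0)
	r, _, err := t.Input.PeekRune(0) // after: peek via the Input struct
	if err == nil && unicode.IsLower(r) {
		// Before this commit: t.AcceptRune(r)
		t.Input.AcceptRune(r) // after: accept via the Input struct and advance
		return true
	}
	return false
}
```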
@@ -54,7 +54,7 @@ func (p *API) Accept(tokenHandler tokenize.Handler) bool {
 		p.tokenAPI.Dispose(forkedAPI)

 		// And flush the input reader buffer.
-		p.tokenAPI.FlushInput()
+		p.tokenAPI.Input.Flush()
 	} else {
 		p.tokenAPI.Dispose(forkedAPI)
 	}
@@ -159,7 +159,7 @@ func (p *API) Error(format string, data ...interface{}) {
 	// No call to p.panicWhenStoppedOrInError(), to allow a parser to
 	// set a different error message when needed.
 	message := fmt.Sprintf(format, data...)
-	p.err = fmt.Errorf("%s at %s", message, p.tokenAPI.Cursor())
+	p.err = fmt.Errorf("%s at %s", message, p.tokenAPI.Input.Cursor())
 }

 // ExpectEndOfFile can be used to check if the input is at end of file.
@@ -191,7 +191,7 @@ func (p *API) ExpectEndOfFile() {
 // • there was an error while reading the input.
 func (p *API) Expected(expected string) {
 	p.panicWhenStoppedOrInError("Expected")
-	_, err := p.tokenAPI.PeekByte(0)
+	_, err := p.tokenAPI.Input.PeekByte(0)
 	switch {
 	case err == nil:
 		p.Error("unexpected input%s", fmtExpects(expected))

tokenize/api.go (476 lines changed)

@@ -78,6 +78,7 @@ type API struct {
 	stackFrames []stackFrame // the stack frames, containing stack level-specific data
 	stackLevel int // the current stack level
 	stackFrame *stackFrame // the current stack frame
+	Input Input // provides input-related functionality
 	Output Output // provides output-related functionality
 }

@@ -94,6 +95,11 @@ type stackFrame struct {
 	err error // can be used by a Handler to report a specific issue with the input
 }

+// Input provides input-related functionality for the tokenize API.
+type Input struct {
+	api *API
+}
+
 // Output provides output-related functionality for the tokenize API.
 type Output struct {
 	api *API
@@ -113,221 +119,13 @@ func NewAPI(input interface{}) *API {
 		tokens:      make([]Token, initialTokenStoreLength),
 		stackFrames: make([]stackFrame, initialStackDepth),
 	}
+	api.Input = Input{api: api}
 	api.Output = Output{api: api}
 	api.stackFrame = &api.stackFrames[0]

 	return api
 }

-// PeekByte returns the byte at the provided byte offset.
-//
-// When an error occurs during reading the input, an error will be returned.
-// When an offset is requested that is beyond the length of the available input
-// data, then the error will be io.EOF.
-func (i *API) PeekByte(offset int) (byte, error) {
-	return i.reader.ByteAt(i.stackFrame.offset + offset)
-}
-
-// SkipByte is used to skip over a single bytes that was read from the input.
-// This tells the tokenizer: "I've seen this byte. It is of no interest.
-// I will now continue reading after this byte."
-//
-// This will merely update the position of the cursor (which keeps track of what
-// line and column we are on in the input data). The byte is not added to
-// the results.
-//
-// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
-// the first byte after the skipped byte.
-func (i *API) SkipByte(b byte) {
-	i.stackFrame.moveCursorByByte(b)
-	i.stackFrame.offset++
-}
-
-// SkipBytes is used to skip over one or more bytes that were read from the input.
-// This tells the tokenizer: "I've seen these bytes. They are of no interest.
-// I will now continue reading after these bytes."
-//
-// This will merely update the position of the cursor (which keeps track of what
-// line and column we are on in the input data). The bytes are not added to
-// the results.
-//
-// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
-// the first byte after the skipped bytes.
-func (i *API) SkipBytes(bytes ...byte) {
-	for _, b := range bytes {
-		i.stackFrame.moveCursorByByte(b)
-		i.stackFrame.offset++
-	}
-}
-
-// AcceptByte is used to accept a single byte that was read from the input.
-// This tells the tokenizer: "I've seen this byte. I want to make use of it
-// for the final output, so please remember it for me. I will now continue
-// reading after this byte."
-//
-// This will update the position of the cursor (which keeps track of what line
-// and column we are on in the input data) and add the byte to the tokenizer
-// results.
-//
-// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
-// the first byte after the accepted byte.
-func (i *API) AcceptByte(b byte) {
-	curBytesEnd := i.stackFrame.bytesEnd
-	maxRequiredBytes := curBytesEnd + 1
-
-	// Grow the bytes capacity when needed.
-	if cap(i.bytes) < maxRequiredBytes {
-		newBytes := make([]byte, maxRequiredBytes*2)
-		copy(newBytes, i.bytes)
-		i.bytes = newBytes
-	}
-
-	i.bytes[curBytesEnd] = b
-	i.stackFrame.moveCursorByByte(b)
-	i.stackFrame.bytesEnd++
-	i.stackFrame.offset++
-}
-
-// AcceptBytes is used to accept one or more bytes that were read from the input.
-// This tells the tokenizer: "I've seen these bytes. I want to make use of them
-// for the final output, so please remember them for me. I will now continue
-// reading after these bytes."
-//
-// This will update the position of the cursor (which keeps track of what line
-// and column we are on in the input data) and add the bytes to the tokenizer
-// results.
-//
-// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
-// the first byte after the accepted bytes.
-func (i *API) AcceptBytes(bytes ...byte) {
-	curBytesEnd := i.stackFrame.bytesEnd
-	newBytesEnd := curBytesEnd + len(bytes)
-
-	// Grow the bytes capacity when needed.
-	if cap(i.bytes) < newBytesEnd {
-		newBytes := make([]byte, newBytesEnd*2)
-		copy(newBytes, i.bytes)
-		i.bytes = newBytes
-	}
-
-	copy(i.bytes[curBytesEnd:], bytes)
-	for _, b := range bytes {
-		i.stackFrame.moveCursorByByte(b)
-		i.stackFrame.offset++
-	}
-	i.stackFrame.bytesEnd = newBytesEnd
-}
-
-// PeekRune returns the UTF8 rune at the provided byte offset, including its byte width.
-//
-// The byte width is useful to know what byte offset you'll have to use to peek
-// the next byte or rune. Some UTF8 runes take up 4 bytes of data, so when the
-// first rune starts at offset = 0, the second rune might start at offset = 4.
-//
-// When an invalid UTF8 rune is encountered on the input, it is replaced with
-// the utf.RuneError rune. It's up to the caller to handle this as an error
-// when needed.
-//
-// When an error occurs during reading the input, an error will be returned.
-// When an offset is requested that is beyond the length of the available input
-// data, then the error will be io.EOF.
-func (i *API) PeekRune(offset int) (rune, int, error) {
-	return i.reader.RuneAt(i.stackFrame.offset + offset)
-}
-
-// SkipRune is used to skip over a single rune that was read from the input.
-// This tells the tokenizer: "I've seen this rune. It is of no interest.
-// I will now continue reading after this rune."
-//
-// This will merely update the position of the cursor (which keeps track of what
-// line and column we are on in the input data). The rune is not added to
-// the results.
-//
-// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
-// the first byte after the skipped rune.
-func (i *API) SkipRune(r rune) {
-	i.stackFrame.moveCursorByRune(r)
-	i.stackFrame.offset += utf8.RuneLen(r)
-}
-
-// SkipRunes is used to skip over one or more runes that were read from the input.
-// This tells the tokenizer: "I've seen these runes. They are of no interest.
-// I will now continue reading after these runes."
-//
-// This will merely update the position of the cursor (which keeps track of what
-// line and column we are on in the input data). The runes are not added to
-// the results.
-//
-// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
-// the first byte after the skipped runes.
-func (i *API) SkipRunes(runes ...rune) {
-	for _, r := range runes {
-		i.stackFrame.moveCursorByRune(r)
-		i.stackFrame.offset += utf8.RuneLen(r)
-	}
-}
-
-// AcceptRune is used to accept a single rune that was read from the input.
-// This tells the tokenizer: "I've seen this rune. I want to make use of it
-// for the final output, so please remember it for me. I will now continue
-// reading after this rune."
-//
-// This will update the position of the cursor (which keeps track of what line
-// and column we are on in the input data) and add the rune to the tokenizer
-// results.
-//
-// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
-// the first byte after the accepted rune.
-func (i *API) AcceptRune(r rune) {
-	curBytesEnd := i.stackFrame.bytesEnd
-	maxRequiredBytes := curBytesEnd + utf8.UTFMax
-
-	// Grow the runes capacity when needed.
-	if cap(i.bytes) < maxRequiredBytes {
-		newBytes := make([]byte, maxRequiredBytes*2)
-		copy(newBytes, i.bytes)
-		i.bytes = newBytes
-	}
-
-	i.stackFrame.moveCursorByRune(r)
-	w := utf8.EncodeRune(i.bytes[curBytesEnd:], r)
-	i.stackFrame.bytesEnd += w
-	i.stackFrame.offset += w
-}
-
-// AcceptRunes is used to accept one or more runes that were read from the input.
-// This tells the tokenizer: "I've seen these runes. I want to make use of them
-// for the final output, so please remember them for me. I will now continue
-// reading after these runes."
-//
-// This will update the position of the cursor (which keeps track of what line
-// and column we are on in the input data) and add the runes to the tokenizer
-// results.
-//
-// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
-// the first byte after the accepted runes.
-func (i *API) AcceptRunes(runes ...rune) {
-	runesAsString := string(runes)
-	byteLen := len(runesAsString)
-	curBytesEnd := i.stackFrame.bytesEnd
-	newBytesEnd := curBytesEnd + byteLen
-
-	// Grow the runes capacity when needed.
-	if cap(i.bytes) < newBytesEnd {
-		newBytes := make([]byte, newBytesEnd*2)
-		copy(newBytes, i.bytes)
-		i.bytes = newBytes
-	}
-
-	for _, r := range runes {
-		i.stackFrame.moveCursorByRune(r)
-	}
-	copy(i.bytes[curBytesEnd:], runesAsString)
-
-	i.stackFrame.bytesEnd = newBytesEnd
-	i.stackFrame.offset += byteLen
-}
-
 // Fork forks off a child of the API struct. It will reuse the same
 // read buffer and cursor position, but for the rest this can be considered
 // a fresh API.
@@ -436,32 +234,251 @@ func (i *API) Dispose(stackLevel int) {
 	i.stackFrame = &i.stackFrames[stackLevel-1]
 }

-func (i *API) Reset() {
-	if i.stackLevel == 0 {
-		i.stackFrame.column = 0
-		i.stackFrame.line = 0
-		i.stackFrame.offset = 0
-	} else {
-		parent := i.stackFrames[i.stackLevel-1]
-		i.stackFrame.column = parent.column
-		i.stackFrame.line = parent.line
-		i.stackFrame.offset = parent.offset
-	}
-	i.stackFrame.bytesEnd = i.stackFrame.bytesStart
-	i.stackFrame.tokenEnd = i.stackFrame.tokenStart
-	i.stackFrame.err = nil
-}
+// Reset moves the input cursor back to the beginning for the currently active API child.
+// Aditionally, any output (bytes and tokens) that was emitted from the API child are
+// cleared as well.
+func (i Input) Reset() {
+	if i.api.stackLevel == 0 {
+		i.api.stackFrame.column = 0
+		i.api.stackFrame.line = 0
+		i.api.stackFrame.offset = 0
+	} else {
+		parent := i.api.stackFrames[i.api.stackLevel-1]
+		i.api.stackFrame.column = parent.column
+		i.api.stackFrame.line = parent.line
+		i.api.stackFrame.offset = parent.offset
+	}
+	i.api.stackFrame.bytesEnd = i.api.stackFrame.bytesStart
+	i.api.stackFrame.tokenEnd = i.api.stackFrame.tokenStart
+	i.api.stackFrame.err = nil
+}

-// FlushInput flushes input data from the read.Buffer up to the current
+func (i Input) Cursor() string {
+	if i.api.stackFrame.line == 0 && i.api.stackFrame.column == 0 {
+		return fmt.Sprintf("start of file")
+	}
+	return fmt.Sprintf("line %d, column %d", i.api.stackFrame.line+1, i.api.stackFrame.column+1)
+}
+
+// PeekByte returns the byte at the provided byte offset.
+//
+// When an error occurs during reading the input, an error will be returned.
+// When an offset is requested that is beyond the length of the available input
+// data, then the error will be io.EOF.
+func (i Input) PeekByte(offset int) (byte, error) {
+	return i.api.reader.ByteAt(i.api.stackFrame.offset + offset)
+}
+
+// SkipByte is used to skip over a single bytes that was read from the input.
+// This tells the tokenizer: "I've seen this byte. It is of no interest.
+// I will now continue reading after this byte."
+//
+// This will merely update the position of the cursor (which keeps track of what
+// line and column we are on in the input data). The byte is not added to
+// the output.
+//
+// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
+// the first byte after the skipped byte.
+func (i Input) SkipByte(b byte) {
+	i.api.stackFrame.moveCursorByByte(b)
+	i.api.stackFrame.offset++
+}
+
+// SkipBytes is used to skip over one or more bytes that were read from the input.
+// This tells the tokenizer: "I've seen these bytes. They are of no interest.
+// I will now continue reading after these bytes."
+//
+// This will merely update the position of the cursor (which keeps track of what
+// line and column we are on in the input data). The bytes are not added to
+// the output.
+//
+// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
+// the first byte after the skipped bytes.
+func (i Input) SkipBytes(bytes ...byte) {
+	for _, b := range bytes {
+		i.api.stackFrame.moveCursorByByte(b)
+		i.api.stackFrame.offset++
+	}
+}
+
+// AcceptByte is used to accept a single byte that was read from the input.
+// This tells the tokenizer: "I've seen this byte. I want to make use of it
+// for the final output, so please remember it for me. I will now continue
+// reading after this byte."
+//
+// This will update the position of the cursor (which keeps track of what line
+// and column we are on in the input data) and add the byte to the tokenizer
+// output.
+//
+// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
+// the first byte after the accepted byte.
+func (i Input) AcceptByte(b byte) {
+	curBytesEnd := i.api.stackFrame.bytesEnd
+	maxRequiredBytes := curBytesEnd + 1
+
+	// Grow the bytes capacity when needed.
+	if cap(i.api.bytes) < maxRequiredBytes {
+		newBytes := make([]byte, maxRequiredBytes*2)
+		copy(newBytes, i.api.bytes)
+		i.api.bytes = newBytes
+	}
+
+	i.api.bytes[curBytesEnd] = b
+	i.api.stackFrame.moveCursorByByte(b)
+	i.api.stackFrame.bytesEnd++
+	i.api.stackFrame.offset++
+}
+
+// AcceptBytes is used to accept one or more bytes that were read from the input.
+// This tells the tokenizer: "I've seen these bytes. I want to make use of them
+// for the final output, so please remember them for me. I will now continue
+// reading after these bytes."
+//
+// This will update the position of the cursor (which keeps track of what line
+// and column we are on in the input data) and add the bytes to the tokenizer
+// output.
+//
+// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
+// the first byte after the accepted bytes.
+func (i Input) AcceptBytes(bytes ...byte) {
+	curBytesEnd := i.api.stackFrame.bytesEnd
+	newBytesEnd := curBytesEnd + len(bytes)
+
+	// Grow the bytes capacity when needed.
+	if cap(i.api.bytes) < newBytesEnd {
+		newBytes := make([]byte, newBytesEnd*2)
+		copy(newBytes, i.api.bytes)
+		i.api.bytes = newBytes
+	}
+
+	copy(i.api.bytes[curBytesEnd:], bytes)
+	for _, b := range bytes {
+		i.api.stackFrame.moveCursorByByte(b)
+		i.api.stackFrame.offset++
+	}
+	i.api.stackFrame.bytesEnd = newBytesEnd
+}
+
+// PeekRune returns the UTF8 rune at the provided byte offset, including its byte width.
+//
+// The byte width is useful to know what byte offset you'll have to use to peek
+// the next byte or rune. Some UTF8 runes take up 4 bytes of data, so when the
+// first rune starts at offset = 0, the second rune might start at offset = 4.
+//
+// When an invalid UTF8 rune is encountered on the input, it is replaced with
+// the utf.RuneError rune. It's up to the caller to handle this as an error
+// when needed.
+//
+// When an error occurs during reading the input, an error will be returned.
+// When an offset is requested that is beyond the length of the available input
+// data, then the error will be io.EOF.
+func (i Input) PeekRune(offset int) (rune, int, error) {
+	return i.api.reader.RuneAt(i.api.stackFrame.offset + offset)
+}
+
+// SkipRune is used to skip over a single rune that was read from the input.
+// This tells the tokenizer: "I've seen this rune. It is of no interest.
+// I will now continue reading after this rune."
+//
+// This will merely update the position of the cursor (which keeps track of what
+// line and column we are on in the input data). The rune is not added to
+// the output.
+//
+// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
+// the first byte after the skipped rune.
+func (i Input) SkipRune(r rune) {
+	i.api.stackFrame.moveCursorByRune(r)
+	i.api.stackFrame.offset += utf8.RuneLen(r)
+}
+
+// SkipRunes is used to skip over one or more runes that were read from the input.
+// This tells the tokenizer: "I've seen these runes. They are of no interest.
+// I will now continue reading after these runes."
+//
+// This will merely update the position of the cursor (which keeps track of what
+// line and column we are on in the input data). The runes are not added to
+// the output.
+//
+// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
+// the first byte after the skipped runes.
+func (i Input) SkipRunes(runes ...rune) {
+	for _, r := range runes {
+		i.api.stackFrame.moveCursorByRune(r)
+		i.api.stackFrame.offset += utf8.RuneLen(r)
+	}
+}
+
+// AcceptRune is used to accept a single rune that was read from the input.
+// This tells the tokenizer: "I've seen this rune. I want to make use of it
+// for the final output, so please remember it for me. I will now continue
+// reading after this rune."
+//
+// This will update the position of the cursor (which keeps track of what line
+// and column we are on in the input data) and add the rune to the tokenizer
+// output.
+//
+// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
+// the first byte after the accepted rune.
+func (i Input) AcceptRune(r rune) {
+	curBytesEnd := i.api.stackFrame.bytesEnd
+	maxRequiredBytes := curBytesEnd + utf8.UTFMax
+
+	// Grow the runes capacity when needed.
+	if cap(i.api.bytes) < maxRequiredBytes {
+		newBytes := make([]byte, maxRequiredBytes*2)
+		copy(newBytes, i.api.bytes)
+		i.api.bytes = newBytes
+	}
+
+	i.api.stackFrame.moveCursorByRune(r)
+	w := utf8.EncodeRune(i.api.bytes[curBytesEnd:], r)
+	i.api.stackFrame.bytesEnd += w
+	i.api.stackFrame.offset += w
+}
+
+// AcceptRunes is used to accept one or more runes that were read from the input.
+// This tells the tokenizer: "I've seen these runes. I want to make use of them
+// for the final output, so please remember them for me. I will now continue
+// reading after these runes."
+//
+// This will update the position of the cursor (which keeps track of what line
+// and column we are on in the input data) and add the runes to the tokenizer
+// output.
+//
+// After the call, byte offset 0 for PeekByte() and PeekRune() will point at
+// the first byte after the accepted runes.
+func (i Input) AcceptRunes(runes ...rune) {
+	runesAsString := string(runes)
+	byteLen := len(runesAsString)
+	curBytesEnd := i.api.stackFrame.bytesEnd
+	newBytesEnd := curBytesEnd + byteLen
+
+	// Grow the runes capacity when needed.
+	if cap(i.api.bytes) < newBytesEnd {
+		newBytes := make([]byte, newBytesEnd*2)
+		copy(newBytes, i.api.bytes)
+		i.api.bytes = newBytes
+	}
+
+	for _, r := range runes {
+		i.api.stackFrame.moveCursorByRune(r)
+	}
+	copy(i.api.bytes[curBytesEnd:], runesAsString)
+
+	i.api.stackFrame.bytesEnd = newBytesEnd
+	i.api.stackFrame.offset += byteLen
+}
+
+// Flush flushes input data from the read.Buffer up to the current
 // read offset of the parser.
 //
 // Note:
 // When writing your own TokenHandler, you normally won't have to call this
 // method yourself. It is automatically called by parsekit when possible.
-func (i *API) FlushInput() bool {
-	if i.stackFrame.offset > 0 {
-		i.reader.Flush(i.stackFrame.offset)
-		i.stackFrame.offset = 0
+func (i Input) Flush() bool {
+	if i.api.stackFrame.offset > 0 {
+		i.api.reader.Flush(i.api.stackFrame.offset)
+		i.api.stackFrame.offset = 0
 		return true
 	}
 	return false
@@ -532,13 +549,6 @@ func (o Output) SetString(s string) {
 	o.SetBytes([]byte(s)...)
 }

-func (i *API) Cursor() string {
-	if i.stackFrame.line == 0 && i.stackFrame.column == 0 {
-		return fmt.Sprintf("start of file")
-	}
-	return fmt.Sprintf("line %d, column %d", i.stackFrame.line+1, i.stackFrame.column+1)
-}
-
 func (o Output) Tokens() []Token {
 	return o.api.tokens[o.api.stackFrame.tokenStart:o.api.stackFrame.tokenEnd]
 }
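The new Input struct mirrors the existing Output struct: it carries no state of its own, only a back-pointer to the owning API, and every method dereferences that pointer to reach the shared reader, byte buffer, and stack frame. A small self-contained sketch of this back-pointer pattern, using hypothetical names rather than the library's actual fields:

```go
package sketch

// engine stands in for the tokenize API: it owns all the state.
type engine struct {
	offset int
	Input  input // facade for input-side operations
}

// input keeps only a back-pointer; its methods act on the parent's state.
type input struct {
	api *engine
}

// newEngine wires the facade to its parent, like api.Input = Input{api: api}.
func newEngine() *engine {
	e := &engine{}
	e.Input = input{api: e}
	return e
}

// Skip advances the parent's read offset through the facade. A value receiver
// is enough here, because the facade only holds a pointer to the shared state,
// which matches the value receivers used on Input in this commit.
func (in input) Skip(n int) {
	in.api.offset += n
}
```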
@@ -30,10 +30,10 @@ func ExampleNewAPI() {
 func ExampleAPI_PeekRune() {
 	api := tokenize.NewAPI("The input that the API will handle")

-	r1, _, err := api.PeekRune(19) // 'A'
-	r2, _, err := api.PeekRune(20) // 'P'
-	r3, _, err := api.PeekRune(21) // 'I'
-	_, _, err = api.PeekRune(100) // EOF
+	r1, _, err := api.Input.PeekRune(19) // 'A'
+	r2, _, err := api.Input.PeekRune(20) // 'P'
+	r3, _, err := api.Input.PeekRune(21) // 'I'
+	_, _, err = api.Input.PeekRune(100) // EOF

 	fmt.Printf("%c%c%c %s\n", r1, r2, r3, err)

@@ -45,15 +45,15 @@ func ExampleAPI_AcceptRune() {
 	api := tokenize.NewAPI("The input that the API will handle")

 	// Reads 'T' and accepts it to the API results.
-	r, _, _ := api.PeekRune(0)
-	api.AcceptRune(r)
+	r, _, _ := api.Input.PeekRune(0)
+	api.Input.AcceptRune(r)

 	// Reads 'h' and accepts it to the API results.
-	r, _, _ = api.PeekRune(0)
-	api.AcceptRune(r)
+	r, _, _ = api.Input.PeekRune(0)
+	api.Input.AcceptRune(r)

 	// Reads 'e', but does not accept it to the API results.
-	r, _, _ = api.PeekRune(0)
+	r, _, _ = api.Input.PeekRune(0)

 	fmt.Printf("API results: %q\n", api.Output.String())

@@ -65,14 +65,14 @@ func ExampleAPI_AcceptRunes() {
 	api := tokenize.NewAPI("The input that the API will handle")

 	// Peeks at the first two runes 'T' and 'h'.
-	r0, _, _ := api.PeekRune(0)
-	r1, _, _ := api.PeekRune(1)
+	r0, _, _ := api.Input.PeekRune(0)
+	r1, _, _ := api.Input.PeekRune(1)

 	// Peeks at the third rune 'e'.
-	api.PeekRune(2)
+	api.Input.PeekRune(2)

 	// Accepts only 'T' and 'h' into the API results.
-	api.AcceptRunes(r0, r1)
+	api.Input.AcceptRunes(r0, r1)

 	fmt.Printf("API results: %q\n", api.Output.String())

@@ -84,7 +84,7 @@ func ExampleAPI_SkipRune() {
 	api := tokenize.NewAPI("The input that the API will handle")

 	for {
-		r, _, err := api.PeekRune(0)
+		r, _, err := api.Input.PeekRune(0)

 		// EOF reached.
 		if err != nil {
@@ -93,9 +93,9 @@ func ExampleAPI_SkipRune() {

 		// Only accept runes that are vowels.
 		if strings.ContainsRune("aeiouAEIOU", r) {
-			api.AcceptRune(r)
+			api.Input.AcceptRune(r)
 		} else {
-			api.SkipRune(r)
+			api.Input.SkipRune(r)
 		}
 	}

@@ -145,22 +145,22 @@ func ExampleAPI_modifyingResults() {
 func ExampleAPI_Reset() {
 	api := tokenize.NewAPI("Very important input!")

-	r, _, _ := api.PeekRune(0) // read 'V'
-	api.AcceptRune(r)
-	r, _, _ = api.PeekRune(0) // read 'e'
-	api.AcceptRune(r)
-	fmt.Printf("API results: %q at %s\n", api.Output.String(), api.Cursor())
+	r, _, _ := api.Input.PeekRune(0) // read 'V'
+	api.Input.AcceptRune(r)
+	r, _, _ = api.Input.PeekRune(0) // read 'e'
+	api.Input.AcceptRune(r)
+	fmt.Printf("API results: %q at %s\n", api.Output.String(), api.Input.Cursor())

 	// Reset clears the results.
-	api.Reset()
-	fmt.Printf("API results: %q at %s\n", api.Output.String(), api.Cursor())
+	api.Input.Reset()
+	fmt.Printf("API results: %q at %s\n", api.Output.String(), api.Input.Cursor())

 	// So then doing the same read operations, the same data are read.
-	r, _, _ = api.PeekRune(0) // read 'V'
-	api.AcceptRune(r)
-	r, _, _ = api.PeekRune(0) // read 'e'
-	api.AcceptRune(r)
-	fmt.Printf("API results: %q at %s\n", api.Output.String(), api.Cursor())
+	r, _, _ = api.Input.PeekRune(0) // read 'V'
+	api.Input.AcceptRune(r)
+	r, _, _ = api.Input.PeekRune(0) // read 'e'
+	api.Input.AcceptRune(r)
+	fmt.Printf("API results: %q at %s\n", api.Output.String(), api.Input.Cursor())

 	// Output:
 	// API results: "Ve" at line 1, column 3
@@ -211,14 +211,14 @@ func ExampleAPI_Fork() {
 func ExampleAPI_Merge() {
 	tokenHandler := func(t *tokenize.API) bool {
 		child1 := t.Fork()
-		r0, _, _ := t.PeekRune(0) // reads 'H'
-		r1, _, _ := t.PeekRune(1) // reads 'i'
-		t.AcceptRunes(r0, r1) // these runes are accepted in the API results for child1
+		r0, _, _ := t.Input.PeekRune(0) // reads 'H'
+		r1, _, _ := t.Input.PeekRune(1) // reads 'i'
+		t.Input.AcceptRunes(r0, r1) // these runes are accepted in the API results for child1

 		child2 := t.Fork()
-		r0, _, _ = t.PeekRune(0) // reads ' '
-		r1, _, _ = t.PeekRune(1) // reads 'm'
-		t.AcceptRunes(r0, r1) // these runes are accepted in the API results for child2
+		r0, _, _ = t.Input.PeekRune(0) // reads ' '
+		r1, _, _ = t.Input.PeekRune(1) // reads 'm'
+		t.Input.AcceptRunes(r0, r1) // these runes are accepted in the API results for child2
 		t.Dispose(child2) // but they are not merged and thefore not used by child1

 		t.Merge(child1) // We merge child1, which has read 'H' and 'i' only.
@@ -243,15 +243,15 @@ func TestMultipleLevelsOfForksAndMerges(t *testing.T) {
 	child4 := api.Fork()

 	// Read a rune 'a' from child4.
-	r, _, _ := api.PeekRune(0)
+	r, _, _ := api.Input.PeekRune(0)
 	AssertEqual(t, 'a', r, "child4 rune 1")
-	api.AcceptRune(r)
+	api.Input.AcceptRune(r)
 	AssertEqual(t, "a", api.Output.String(), "child4 runes after rune 1")

 	// Read another rune 'b' from child4.
-	r, _, _ = api.PeekRune(0)
+	r, _, _ = api.Input.PeekRune(0)
 	AssertEqual(t, 'b', r, "child4 rune 2")
-	api.AcceptRune(r)
+	api.Input.AcceptRune(r)
 	AssertEqual(t, "ab", api.Output.String(), "child4 runes after rune 2")

 	// Merge "ab" from child4 to child3.
@@ -259,11 +259,11 @@ func TestMultipleLevelsOfForksAndMerges(t *testing.T) {
 	AssertEqual(t, "", api.Output.String(), "child4 runes after first merge")

 	// Read some more from child4.
-	r, _, _ = api.PeekRune(0)
+	r, _, _ = api.Input.PeekRune(0)
 	AssertEqual(t, 'c', r, "child4 rune 3")
-	api.AcceptRune(r)
+	api.Input.AcceptRune(r)
 	AssertEqual(t, "c", api.Output.String(), "child4 runes after rune 1")
-	AssertEqual(t, "line 1, column 4", api.Cursor(), "cursor child4 rune 3")
+	AssertEqual(t, "line 1, column 4", api.Input.Cursor(), "cursor child4 rune 3")

 	// Merge "c" from child4 to child3.
 	api.Merge(child4)
@@ -273,32 +273,32 @@ func TestMultipleLevelsOfForksAndMerges(t *testing.T) {

 	// Child3 should now have the compbined results "abc" from child4's work.
 	AssertEqual(t, "abc", api.Output.String(), "child3 after merge of child4")
-	AssertEqual(t, "line 1, column 4", api.Cursor(), "cursor child3 rune 3, after merge of child4")
+	AssertEqual(t, "line 1, column 4", api.Input.Cursor(), "cursor child3 rune 3, after merge of child4")

 	// Now read some data from child3.
-	r, _, _ = api.PeekRune(0)
+	r, _, _ = api.Input.PeekRune(0)
 	AssertEqual(t, 'd', r, "child3 rune 5")
-	api.AcceptRune(r)
+	api.Input.AcceptRune(r)

-	r, _, _ = api.PeekRune(0)
+	r, _, _ = api.Input.PeekRune(0)
 	AssertEqual(t, 'e', r, "child3 rune 5")
-	api.AcceptRune(r)
+	api.Input.AcceptRune(r)

-	r, _, _ = api.PeekRune(0)
+	r, _, _ = api.Input.PeekRune(0)
 	AssertEqual(t, 'f', r, "child3 rune 5")
-	api.AcceptRune(r)
+	api.Input.AcceptRune(r)

 	AssertEqual(t, "abcdef", api.Output.String(), "child3 total result after rune 6")

 	// Temporarily go some new forks from here, but don't use their outcome.
 	child3sub1 := api.Fork()
-	r, _, _ = api.PeekRune(0)
-	api.AcceptRune(r)
-	r, _, _ = api.PeekRune(0)
-	api.AcceptRune(r)
+	r, _, _ = api.Input.PeekRune(0)
+	api.Input.AcceptRune(r)
+	r, _, _ = api.Input.PeekRune(0)
+	api.Input.AcceptRune(r)
 	child3sub2 := api.Fork()
-	r, _, _ = api.PeekRune(0)
-	api.AcceptRune(r)
+	r, _, _ = api.Input.PeekRune(0)
+	api.Input.AcceptRune(r)
 	api.Merge(child3sub2) // do merge sub2 down to sub1
 	api.Dispose(child3sub2) // and dispose of sub2
 	api.Dispose(child3sub1) // but dispose of sub1 without merging
@@ -309,7 +309,7 @@ func TestMultipleLevelsOfForksAndMerges(t *testing.T) {
 	api.Dispose(child3)

 	AssertEqual(t, "abcdef", api.Output.String(), "child2 total result after merge of child3")
-	AssertEqual(t, "line 1, column 7", api.Cursor(), "cursor child2 after merge child3")
+	AssertEqual(t, "line 1, column 7", api.Input.Cursor(), "cursor child2 after merge child3")

 	// Merge child2 to child1 and dispose of it.
 	api.Merge(child2)
@@ -325,24 +325,24 @@ func TestMultipleLevelsOfForksAndMerges(t *testing.T) {
 	api.Dispose(child1)

 	// Read some data from the top level api.
-	r, _, _ = api.PeekRune(0)
-	api.AcceptRune(r)
+	r, _, _ = api.Input.PeekRune(0)
+	api.Input.AcceptRune(r)

 	AssertEqual(t, "abcdefg", api.Output.String(), "api string end result")
-	AssertEqual(t, "line 1, column 8", api.Cursor(), "api cursor end result")
+	AssertEqual(t, "line 1, column 8", api.Input.Cursor(), "api cursor end result")
 }

 func TestClearData(t *testing.T) {
 	api := tokenize.NewAPI("Laphroaig")
-	r, _, _ := api.PeekRune(0) // Read 'L'
-	api.AcceptRune(r) // Add to runes
-	r, _, _ = api.PeekRune(0) // Read 'a'
-	api.AcceptRune(r) // Add to runes
+	r, _, _ := api.Input.PeekRune(0) // Read 'L'
+	api.Input.AcceptRune(r) // Add to runes
+	r, _, _ = api.Input.PeekRune(0) // Read 'a'
+	api.Input.AcceptRune(r) // Add to runes
 	api.Output.ClearData() // Clear the runes, giving us a fresh start.
-	r, _, _ = api.PeekRune(0) // Read 'p'
-	api.AcceptRune(r) // Add to runes
-	r, _, _ = api.PeekRune(0) // Read 'r'
-	api.AcceptRune(r) // Add to runes
+	r, _, _ = api.Input.PeekRune(0) // Read 'p'
+	api.Input.AcceptRune(r) // Add to runes
+	r, _, _ = api.Input.PeekRune(0) // Read 'r'
+	api.Input.AcceptRune(r) // Add to runes

 	AssertEqual(t, "ph", api.Output.String(), "api string end result")
 }
@@ -374,7 +374,7 @@ func TestMergeScenariosForTokens(t *testing.T) {

 	child = api.Fork()
 	api.Output.AddTokens(token3)
-	api.Reset()
+	api.Input.Reset()
 	api.Output.AddTokens(token4)

 	api.Merge(child)
@@ -14,7 +14,7 @@ func TestMoveCursorByBytes(t *testing.T) {
 	api.stackFrame.moveCursorByByte('a')
 	api.stackFrame.moveCursorByByte('b')

-	AssertEqual(t, "line 2, column 3", api.Cursor(), "Cursor position after moving by byte")
+	AssertEqual(t, "line 2, column 3", api.Input.Cursor(), "Cursor position after moving by byte")
 }

 func TestMoveCursorByRunes(t *testing.T) {
@@ -26,7 +26,7 @@ func TestMoveCursorByRunes(t *testing.T) {
 	api.stackFrame.moveCursorByRune('\n')
 	api.stackFrame.moveCursorByRune('ǝ')

-	AssertEqual(t, "line 2, column 2", api.Cursor(), "Cursor position after moving by rune")
+	AssertEqual(t, "line 2, column 2", api.Input.Cursor(), "Cursor position after moving by rune")
 }

 func TestWhenMovingCursor_CursorPositionIsUpdated(t *testing.T) {
@ -350,9 +350,9 @@ var T = struct {
|
||||||
// MatchByte creates a Handler function that matches against the provided byte.
|
// MatchByte creates a Handler function that matches against the provided byte.
|
||||||
func MatchByte(expected byte) Handler {
|
func MatchByte(expected byte) Handler {
|
||||||
return func(t *API) bool {
|
return func(t *API) bool {
|
||||||
b, err := t.PeekByte(0)
|
b, err := t.Input.PeekByte(0)
|
||||||
if err == nil && b == expected {
|
if err == nil && b == expected {
|
||||||
t.AcceptByte(b)
|
t.Input.AcceptByte(b)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
|
@ -365,9 +365,9 @@ func MatchRune(expected rune) Handler {
|
||||||
return MatchByte(byte(expected))
|
return MatchByte(byte(expected))
|
||||||
}
|
}
|
||||||
return func(t *API) bool {
|
return func(t *API) bool {
|
||||||
r, _, err := t.PeekRune(0)
|
r, _, err := t.Input.PeekRune(0)
|
||||||
if err == nil && r == expected {
|
if err == nil && r == expected {
|
||||||
t.AcceptRune(r)
|
t.Input.AcceptRune(r)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
|
@ -378,13 +378,13 @@ func MatchRune(expected rune) Handler {
|
||||||
// one of the provided bytes. The first match counts.
|
// one of the provided bytes. The first match counts.
|
||||||
func MatchBytes(expected ...byte) Handler {
|
func MatchBytes(expected ...byte) Handler {
|
||||||
return func(t *API) bool {
|
return func(t *API) bool {
|
||||||
b, err := t.PeekByte(0)
|
b, err := t.Input.PeekByte(0)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
for _, e := range expected {
|
for _, e := range expected {
|
||||||
if b == e {
|
if b == e {
|
||||||
t.AcceptByte(b)
|
t.Input.AcceptByte(b)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -408,13 +408,13 @@ func MatchRunes(expected ...rune) Handler {
|
||||||
return MatchBytes(expectedBytes...)
|
return MatchBytes(expectedBytes...)
|
||||||
}
|
}
|
||||||
return func(t *API) bool {
|
return func(t *API) bool {
|
||||||
r, _, err := t.PeekRune(0)
|
r, _, err := t.Input.PeekRune(0)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
for _, e := range expected {
|
for _, e := range expected {
|
||||||
if r == e {
|
if r == e {
|
||||||
t.AcceptRune(r)
|
t.Input.AcceptRune(r)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -434,9 +434,9 @@ func MatchByteRange(start byte, end byte) Handler {
|
||||||
callerPanic("MatchByteRange", "Handler: {name} definition error at {caller}: start %q must not be < end %q", start, end)
|
callerPanic("MatchByteRange", "Handler: {name} definition error at {caller}: start %q must not be < end %q", start, end)
|
||||||
}
|
}
|
||||||
return func(t *API) bool {
|
return func(t *API) bool {
|
||||||
r, err := t.PeekByte(0)
|
r, err := t.Input.PeekByte(0)
|
||||||
if err == nil && r >= start && r <= end {
|
if err == nil && r >= start && r <= end {
|
||||||
t.AcceptByte(r)
|
t.Input.AcceptByte(r)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
|
@ -458,9 +458,9 @@ func MatchRuneRange(start rune, end rune) Handler {
|
||||||
return MatchByteRange(byte(start), byte(end))
|
return MatchByteRange(byte(start), byte(end))
|
||||||
}
|
}
|
||||||
return func(t *API) bool {
|
return func(t *API) bool {
|
||||||
r, _, err := t.PeekRune(0)
|
r, _, err := t.Input.PeekRune(0)
|
||||||
if err == nil && r >= start && r <= end {
|
if err == nil && r >= start && r <= end {
|
||||||
t.AcceptRune(r)
|
t.Input.AcceptRune(r)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
|
@ -471,18 +471,18 @@ func MatchRuneRange(start rune, end rune) Handler {
|
||||||
// a DOS-style newline (CRLF, \r\n) or a UNIX-style newline (just a LF, \n).
|
// a DOS-style newline (CRLF, \r\n) or a UNIX-style newline (just a LF, \n).
|
||||||
func MatchNewline() Handler {
|
func MatchNewline() Handler {
|
||||||
return func(t *API) bool {
|
return func(t *API) bool {
|
||||||
b1, err := t.PeekByte(0)
|
b1, err := t.Input.PeekByte(0)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
if b1 == '\n' {
|
if b1 == '\n' {
|
||||||
t.AcceptBytes(b1)
|
t.Input.AcceptBytes(b1)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
if b1 == '\r' {
|
if b1 == '\r' {
|
||||||
b2, err := t.PeekByte(1)
|
b2, err := t.Input.PeekByte(1)
|
||||||
if err == nil && b2 == '\n' {
|
if err == nil && b2 == '\n' {
|
||||||
t.AcceptBytes(b1, b2)
|
t.Input.AcceptBytes(b1, b2)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -497,9 +497,9 @@ func MatchNewline() Handler {
|
||||||
// newlines, then take a look at MatchWhitespace().
|
// newlines, then take a look at MatchWhitespace().
|
||||||
func MatchBlank() Handler {
|
func MatchBlank() Handler {
|
||||||
return func(t *API) bool {
|
return func(t *API) bool {
|
||||||
b, err := t.PeekByte(0)
|
b, err := t.Input.PeekByte(0)
|
||||||
if err == nil && (b == ' ' || b == '\t') {
|
if err == nil && (b == ' ' || b == '\t') {
|
||||||
t.AcceptByte(b)
|
t.Input.AcceptByte(b)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
|
@ -516,20 +516,20 @@ func MatchBlank() Handler {
|
||||||
func MatchBlanks() Handler {
|
func MatchBlanks() Handler {
|
||||||
return func(t *API) bool {
|
return func(t *API) bool {
|
||||||
// Match the first blank.
|
// Match the first blank.
|
||||||
b, err := t.PeekByte(0)
|
b, err := t.Input.PeekByte(0)
|
||||||
if err != nil || (b != ' ' && b != '\t') {
|
if err != nil || (b != ' ' && b != '\t') {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
t.AcceptByte(b)
|
t.Input.AcceptByte(b)
|
||||||
|
|
||||||
// Now match any number of followup blanks. We've already got
|
// Now match any number of followup blanks. We've already got
|
||||||
// a successful match at this point, so we'll always return true at the end.
|
// a successful match at this point, so we'll always return true at the end.
|
||||||
for {
|
for {
|
||||||
b, err := t.PeekByte(0)
|
b, err := t.Input.PeekByte(0)
|
||||||
if err != nil || (b != ' ' && b != '\t') {
|
if err != nil || (b != ' ' && b != '\t') {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
t.AcceptByte(b)
|
t.Input.AcceptByte(b)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -540,35 +540,35 @@ func MatchBlanks() Handler {
|
||||||
func MatchWhitespace() Handler {
|
func MatchWhitespace() Handler {
|
||||||
return func(t *API) bool {
|
return func(t *API) bool {
|
||||||
// Match the first whitespace.
|
// Match the first whitespace.
|
||||||
b1, err := t.PeekByte(0)
|
b1, err := t.Input.PeekByte(0)
|
||||||
if err != nil || (b1 != ' ' && b1 != '\t' && b1 != '\n' && b1 != '\r') {
|
if err != nil || (b1 != ' ' && b1 != '\t' && b1 != '\n' && b1 != '\r') {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
if b1 == '\r' {
|
if b1 == '\r' {
|
||||||
b2, err := t.PeekByte(1)
|
b2, err := t.Input.PeekByte(1)
|
||||||
if err != nil || b2 != '\n' {
|
if err != nil || b2 != '\n' {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
t.AcceptBytes(b1, b2)
|
t.Input.AcceptBytes(b1, b2)
|
||||||
} else {
|
} else {
|
||||||
t.AcceptByte(b1)
|
t.Input.AcceptByte(b1)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Now match any number of followup whitespace. We've already got
|
// Now match any number of followup whitespace. We've already got
|
||||||
// a successful match at this point, so we'll always return true at the end.
|
// a successful match at this point, so we'll always return true at the end.
|
||||||
for {
|
for {
|
||||||
b1, err := t.PeekByte(0)
|
b1, err := t.Input.PeekByte(0)
|
||||||
if err != nil || (b1 != ' ' && b1 != '\t' && b1 != '\n' && b1 != '\r') {
|
if err != nil || (b1 != ' ' && b1 != '\t' && b1 != '\n' && b1 != '\r') {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
if b1 == '\r' {
|
if b1 == '\r' {
|
||||||
b2, err := t.PeekByte(1)
|
b2, err := t.Input.PeekByte(1)
|
||||||
if err != nil || b2 != '\n' {
|
if err != nil || b2 != '\n' {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
t.AcceptBytes(b1, b2)
|
t.Input.AcceptBytes(b1, b2)
|
||||||
} else {
|
} else {
|
||||||
t.AcceptByte(b1)
|
t.Input.AcceptByte(b1)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -588,9 +588,9 @@ func MatchUnicodeSpace() Handler {
|
||||||
// so those can be used. E.g. MatchRuneByCallback(unicode.IsLower).
|
// so those can be used. E.g. MatchRuneByCallback(unicode.IsLower).
|
||||||
func MatchByteByCallback(callback func(byte) bool) Handler {
|
func MatchByteByCallback(callback func(byte) bool) Handler {
|
||||||
return func(t *API) bool {
|
return func(t *API) bool {
|
||||||
b, err := t.PeekByte(0)
|
b, err := t.Input.PeekByte(0)
|
||||||
if err == nil && callback(b) {
|
if err == nil && callback(b) {
|
||||||
t.AcceptByte(b)
|
t.Input.AcceptByte(b)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
|
@ -605,9 +605,9 @@ func MatchByteByCallback(callback func(byte) bool) Handler {
|
||||||
// so those can be used. E.g. MatchRuneByCallback(unicode.IsLower).
|
// so those can be used. E.g. MatchRuneByCallback(unicode.IsLower).
|
||||||
func MatchRuneByCallback(callback func(rune) bool) Handler {
|
func MatchRuneByCallback(callback func(rune) bool) Handler {
|
||||||
return func(t *API) bool {
|
return func(t *API) bool {
|
||||||
r, _, err := t.PeekRune(0)
|
r, _, err := t.Input.PeekRune(0)
|
||||||
if err == nil && callback(r) {
|
if err == nil && callback(r) {
|
||||||
t.AcceptRune(r)
|
t.Input.AcceptRune(r)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
|
@ -617,18 +617,18 @@ func MatchRuneByCallback(callback func(rune) bool) Handler {
|
||||||
// MatchEndOfLine creates a Handler that matches a newline ("\r\n" or "\n") or EOF.
|
// MatchEndOfLine creates a Handler that matches a newline ("\r\n" or "\n") or EOF.
|
||||||
func MatchEndOfLine() Handler {
|
func MatchEndOfLine() Handler {
|
||||||
return func(t *API) bool {
|
return func(t *API) bool {
|
||||||
b1, err := t.PeekByte(0)
|
b1, err := t.Input.PeekByte(0)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err == io.EOF
|
return err == io.EOF
|
||||||
}
|
}
|
||||||
if b1 == '\n' {
|
if b1 == '\n' {
|
||||||
t.AcceptByte(b1)
|
t.Input.AcceptByte(b1)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
if b1 == '\r' {
|
if b1 == '\r' {
|
||||||
b2, _ := t.PeekByte(1)
|
b2, _ := t.Input.PeekByte(1)
|
||||||
if b2 == '\n' {
|
if b2 == '\n' {
|
||||||
t.AcceptBytes(b1, b2)
|
t.Input.AcceptBytes(b1, b2)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -644,20 +644,20 @@ func MatchStr(expected string) Handler {
|
||||||
offset := 0
|
offset := 0
|
||||||
for _, e := range expectedRunes {
|
for _, e := range expectedRunes {
|
||||||
if e <= '\x7F' {
|
if e <= '\x7F' {
|
||||||
b, err := t.PeekByte(offset)
|
b, err := t.Input.PeekByte(offset)
|
||||||
if err != nil || b != byte(e) {
|
if err != nil || b != byte(e) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
offset++
|
offset++
|
||||||
} else {
|
} else {
|
||||||
r, w, err := t.PeekRune(offset)
|
r, w, err := t.Input.PeekRune(offset)
|
||||||
if err != nil || e != r {
|
if err != nil || e != r {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
offset += w
|
offset += w
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
t.AcceptRunes(expectedRunes...)
|
t.Input.AcceptRunes(expectedRunes...)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -673,14 +673,14 @@ func MatchStrNoCase(expected string) Handler {
|
||||||
i := 0
|
i := 0
|
||||||
for _, e := range expected {
|
for _, e := range expected {
|
||||||
if e <= '\x7F' {
|
if e <= '\x7F' {
|
||||||
b, err := t.PeekByte(width)
|
b, err := t.Input.PeekByte(width)
|
||||||
if err != nil || (b != byte(e) && unicode.ToUpper(rune(b)) != unicode.ToUpper(e)) {
|
if err != nil || (b != byte(e) && unicode.ToUpper(rune(b)) != unicode.ToUpper(e)) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
matches[i] = rune(b)
|
matches[i] = rune(b)
|
||||||
width++
|
width++
|
||||||
} else {
|
} else {
|
||||||
r, w, err := t.PeekRune(width)
|
r, w, err := t.Input.PeekRune(width)
|
||||||
if err != nil || (r != e && unicode.ToUpper(r) != unicode.ToUpper(e)) {
|
if err != nil || (r != e && unicode.ToUpper(r) != unicode.ToUpper(e)) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
@ -689,7 +689,7 @@ func MatchStrNoCase(expected string) Handler {
|
||||||
}
|
}
|
||||||
i++
|
i++
|
||||||
}
|
}
|
||||||
t.AcceptRunes(matches...)
|
t.Input.AcceptRunes(matches...)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -761,9 +761,9 @@ func MatchNot(handler Handler) Handler {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
t.Dispose(child)
|
t.Dispose(child)
|
||||||
r, _, err := t.PeekRune(0)
|
r, _, err := t.Input.PeekRune(0)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
t.AcceptRune(r)
|
t.Input.AcceptRune(r)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
|
@@ -940,7 +940,7 @@ func MatchNotFollowedBy(lookAhead Handler, handler Handler) Handler {
 func MakeInputFlusher(handler Handler) Handler {
     return func(t *API) bool {
         if handler(t) {
-            t.FlushInput()
+            t.Input.Flush()
             return true
         }
         return false
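MakeInputFlusher is where the renamed flush call is used: it wraps any handler and flushes the input buffer after every successful match, so a long-running tokenizer does not have to keep already-consumed input around. A hedged usage sketch, written as if inside the tokenize package and mirroring the test style further down; the test name and input are hypothetical.

    func TestFlushingDigits_Sketch(t *testing.T) {
        api := NewAPI("12345")
        // Compose: match a run of digits, then flush the input buffer.
        handler := MakeInputFlusher(MatchDigits())
        AssertTrue(t, handler(api), "match digits with input flushing")
        AssertEqual(t, "12345", api.Output.String(), "accepted digits stay in the output")
        AssertEqual(t, 0, api.stackFrame.offset, "read offset is reset by the flush")
    }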
@@ -955,13 +955,13 @@ func MakeInputFlusher(handler Handler) Handler {
 func MatchSigned(handler Handler) Handler {
     return func(t *API) bool {
         child := t.Fork()
-        b, err := t.PeekByte(0)
+        b, err := t.Input.PeekByte(0)
         if err != nil {
             t.Dispose(child)
             return false
         }
         if b == '-' || b == '+' {
-            t.AcceptByte(b)
+            t.Input.AcceptByte(b)
         }
         if handler(t) {
             t.Merge(child)
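MatchSigned shows the fork/merge pattern: the optional sign is accepted on a forked child, and the child is only merged back into the parent when the wrapped handler matches as well. A hedged composition sketch (hypothetical test name and input), assuming merging propagates the accepted runes to the parent the way the fork tests further down suggest.

    func TestMatchSignedDigits_Sketch(t *testing.T) {
        api := NewAPI("-42abc")
        // Wrap the digits matcher so an optional leading sign is accepted too.
        AssertTrue(t, MatchSigned(MatchDigits())(api), "match signed digits")
        AssertEqual(t, "-42", api.Output.String(), "sign and digits end up in the output")
    }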
@@ -1001,7 +1001,7 @@ func MatchIntegerBetween(min int64, max int64) Handler {
 func MatchEndOfFile() Handler {
     return func(t *API) bool {
         child := t.Fork()
-        _, err := t.PeekByte(0)
+        _, err := t.Input.PeekByte(0)
         t.Dispose(child)
         return err == io.EOF
     }
@@ -1017,9 +1017,9 @@ func MatchUntilEndOfLine() Handler {
 // MatchAnyByte creates a Handler function that accepts any byte from the input.
 func MatchAnyByte() Handler {
     return func(t *API) bool {
-        b, err := t.PeekByte(0)
+        b, err := t.Input.PeekByte(0)
         if err == nil {
-            t.AcceptByte(b)
+            t.Input.AcceptByte(b)
             return true
         }
         return false
@@ -1031,9 +1031,9 @@ func MatchAnyByte() Handler {
 // replacement rune \uFFFD (i.e. utf8.RuneError), which displays as �.
 func MatchAnyRune() Handler {
     return func(t *API) bool {
-        r, _, err := t.PeekRune(0)
+        r, _, err := t.Input.PeekRune(0)
         if err == nil {
-            t.AcceptRune(r)
+            t.Input.AcceptRune(r)
             return true
         }
         return false
@@ -1044,9 +1044,9 @@ func MatchAnyRune() Handler {
 // UTF8 rune can be read from the input.
 func MatchValidRune() Handler {
     return func(t *API) bool {
-        r, _, err := t.PeekRune(0)
+        r, _, err := t.Input.PeekRune(0)
         if err == nil && r != utf8.RuneError {
-            t.AcceptRune(r)
+            t.Input.AcceptRune(r)
             return true
         }
         return false
@@ -1057,9 +1057,9 @@ func MatchValidRune() Handler {
 // UTF8 rune can be read from the input.
 func MatchInvalidRune() Handler {
     return func(t *API) bool {
-        r, _, err := t.PeekRune(0)
+        r, _, err := t.Input.PeekRune(0)
         if err == nil && r == utf8.RuneError {
-            t.AcceptRune(r)
+            t.Input.AcceptRune(r)
             return true
         }
         return false
@@ -1077,19 +1077,19 @@ func MatchDigit() Handler {
 func MatchDigits() Handler {
     return func(t *API) bool {
         // Check if the first character is a digit.
-        b, err := t.PeekByte(0)
+        b, err := t.Input.PeekByte(0)
         if err != nil || b < '0' || b > '9' {
             return false
         }
-        t.AcceptByte(b)
+        t.Input.AcceptByte(b)

         // Continue accepting bytes as long as they are digits.
         for {
-            b, err := t.PeekByte(0)
+            b, err := t.Input.PeekByte(0)
             if err != nil || b < '0' || b > '9' {
                 return true
             }
-            t.AcceptByte(b)
+            t.Input.AcceptByte(b)
         }
     }
 }
@@ -1108,7 +1108,7 @@ func MatchDigitNotZero() Handler {
 func MatchInteger(normalize bool) Handler {
     return func(t *API) bool {
         // Check if the first character is a digit.
-        b, err := t.PeekByte(0)
+        b, err := t.Input.PeekByte(0)
         if err != nil || b < '0' || b > '9' {
             return false
         }
@@ -1116,33 +1116,33 @@ func MatchInteger(normalize bool) Handler {
         // When normalization is requested, drop leading zeroes.
         if normalize && b == '0' {
             for {
-                b2, err := t.PeekByte(1)
+                b2, err := t.Input.PeekByte(1)

                 // The next character is a zero, skip the leading zero and check again.
                 if err == nil && b2 == b {
-                    t.SkipByte('0')
+                    t.Input.SkipByte('0')
                     continue
                 }
                 // The next character is not a zero, nor a digit at all.
                 // We're looking at a zero on its own here.
                 if err != nil || b2 < '1' || b2 > '9' {
-                    t.AcceptByte('0')
+                    t.Input.AcceptByte('0')
                     return true
                 }
                 // The next character is a digit. SKip the leading zero and go with the digit.
-                t.SkipByte('0')
-                t.AcceptByte(b2)
+                t.Input.SkipByte('0')
+                t.Input.AcceptByte(b2)
                 break
             }
         }

         // Continue accepting bytes as long as they are digits.
         for {
-            b, err := t.PeekByte(0)
+            b, err := t.Input.PeekByte(0)
             if err != nil || b < '0' || b > '9' {
                 return true
             }
-            t.AcceptByte(b)
+            t.Input.AcceptByte(b)
         }
     }
 }
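The normalization branch above drops leading zeroes by skipping them (Input.SkipByte) instead of accepting them, so they never reach the output; a zero on its own is still accepted. A hedged usage sketch in the style of the tests in this commit, written as if inside the tokenize package (hypothetical test name and input).

    func TestMatchIntegerNormalization_Sketch(t *testing.T) {
        api := NewAPI("00123")
        AssertTrue(t, MatchInteger(true)(api), "match normalized integer")
        AssertEqual(t, "123", api.Output.String(), "leading zeroes are dropped from the output")
    }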
@@ -1157,7 +1157,7 @@ func MatchInteger(normalize bool) Handler {
 func MatchDecimal(normalize bool) Handler {
     return func(t *API) bool {
         // Check if the first character is a digit.
-        b, err := t.PeekByte(0)
+        b, err := t.Input.PeekByte(0)
         if err != nil || b < '0' || b > '9' {
             return false
         }
@@ -1165,58 +1165,58 @@ func MatchDecimal(normalize bool) Handler {
         // When normalization is requested, drop leading zeroes.
         if normalize && b == '0' {
             for {
-                b2, err := t.PeekByte(1)
+                b2, err := t.Input.PeekByte(1)

                 // The next character is a zero, skip the leading zero and check again.
                 if err == nil && b2 == b {
-                    t.SkipByte('0')
+                    t.Input.SkipByte('0')
                     continue
                 }
                 // The next character is a dot, go with the zero before the dot and
                 // let the upcoming code handle the dot.
                 if err == nil && b2 == '.' {
-                    t.AcceptByte('0')
+                    t.Input.AcceptByte('0')
                     break
                 }
                 // The next character is not a zero, nor a digit at all.
                 // We're looking at a zero on its own here.
                 if err != nil || b2 < '1' || b2 > '9' {
-                    t.AcceptByte('0')
+                    t.Input.AcceptByte('0')
                     return true
                 }
                 // The next character is a digit. SKip the leading zero and go with the digit.
-                t.SkipByte('0')
-                t.AcceptByte(b2)
+                t.Input.SkipByte('0')
+                t.Input.AcceptByte(b2)
                 break
             }
         }

         // Continue accepting bytes as long as they are digits.
         for {
-            b, err = t.PeekByte(0)
+            b, err = t.Input.PeekByte(0)
             if err != nil || b < '0' || b > '9' {
                 break
             }
-            t.AcceptBytes(b)
+            t.Input.AcceptBytes(b)
         }

         // No dot or no digit after a dot? Then we're done.
         if b != '.' {
             return true
         }
-        b, err = t.PeekByte(1)
+        b, err = t.Input.PeekByte(1)
         if err != nil || b < '0' || b > '9' {
             return true
         }

         // Continue accepting bytes as long as they are digits.
-        t.AcceptBytes('.', b)
+        t.Input.AcceptBytes('.', b)
         for {
-            b, err = t.PeekByte(0)
+            b, err = t.Input.PeekByte(0)
             if err != nil || b < '0' || b > '9' {
                 break
             }
-            t.AcceptByte(b)
+            t.Input.AcceptByte(b)
         }
         return true
     }
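MatchDecimal applies the same zero-normalization with one extra case: when the zero is followed by a dot, that zero is kept so the result still reads as a decimal. A hedged sketch of that behaviour (hypothetical test name and input), written as if inside the tokenize package.

    func TestMatchDecimalNormalization_Sketch(t *testing.T) {
        api := NewAPI("00.50")
        AssertTrue(t, MatchDecimal(true)(api), "match normalized decimal")
        AssertEqual(t, "0.50", api.Output.String(), "one leading zero is kept before the dot")
    }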
@@ -1231,52 +1231,52 @@ func MatchDecimal(normalize bool) Handler {
 // False falues: false, FALSE, False, 0, f, F
 func MatchBoolean() Handler {
     return func(t *API) bool {
-        b1, err := t.PeekByte(0)
+        b1, err := t.Input.PeekByte(0)
         if err != nil {
             return false
         }
         if b1 == '1' || b1 == '0' {
-            t.AcceptByte(b1)
+            t.Input.AcceptByte(b1)
             return true
         }
         if b1 == 't' || b1 == 'T' {
-            b2, err := t.PeekByte(1)
+            b2, err := t.Input.PeekByte(1)
             if err != nil || (b2 != 'R' && b2 != 'r') {
-                t.AcceptByte(b1)
+                t.Input.AcceptByte(b1)
                 return true
             }
-            b3, _ := t.PeekByte(2)
-            b4, err := t.PeekByte(3)
+            b3, _ := t.Input.PeekByte(2)
+            b4, err := t.Input.PeekByte(3)
             if err == nil && b2 == 'r' && b3 == 'u' && b4 == 'e' {
-                t.AcceptBytes(b1, b2, b3, b4)
+                t.Input.AcceptBytes(b1, b2, b3, b4)
                 return true
             }
             if err == nil && b1 == 'T' && b2 == 'R' && b3 == 'U' && b4 == 'E' {
-                t.AcceptBytes(b1, b2, b3, b4)
+                t.Input.AcceptBytes(b1, b2, b3, b4)
                 return true
             }
-            t.AcceptByte(b1)
+            t.Input.AcceptByte(b1)
             return true
         }

         if b1 == 'f' || b1 == 'F' {
-            b2, err := t.PeekByte(1)
+            b2, err := t.Input.PeekByte(1)
             if err != nil || (b2 != 'A' && b2 != 'a') {
-                t.AcceptByte(b1)
+                t.Input.AcceptByte(b1)
                 return true
             }
-            b3, _ := t.PeekByte(2)
-            b4, _ := t.PeekByte(3)
-            b5, err := t.PeekByte(4)
+            b3, _ := t.Input.PeekByte(2)
+            b4, _ := t.Input.PeekByte(3)
+            b5, err := t.Input.PeekByte(4)
             if err == nil && b2 == 'a' && b3 == 'l' && b4 == 's' && b5 == 'e' {
-                t.AcceptBytes(b1, b2, b3, b4, b5)
+                t.Input.AcceptBytes(b1, b2, b3, b4, b5)
                 return true
             }
             if err == nil && b1 == 'F' && b2 == 'A' && b3 == 'L' && b4 == 'S' && b5 == 'E' {
-                t.AcceptBytes(b1, b2, b3, b4, b5)
+                t.Input.AcceptBytes(b1, b2, b3, b4, b5)
                 return true
             }
-            t.AcceptByte(b1)
+            t.Input.AcceptByte(b1)
             return true
         }
         return false
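For reference, these are the literals the branches above accept, per the doc comment and the byte checks in the code. A hedged sketch (hypothetical test name), written as if inside the tokenize package.

    func TestMatchBooleanLiterals_Sketch(t *testing.T) {
        literals := []string{"true", "TRUE", "True", "1", "t", "T", "false", "FALSE", "False", "0", "f", "F"}
        for _, input := range literals {
            api := NewAPI(input)
            AssertTrue(t, MatchBoolean()(api), "match boolean literal "+input)
            AssertEqual(t, input, api.Output.String(), "the full literal is accepted")
        }
    }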
@@ -1323,9 +1323,9 @@ func MatchUnicodeLower() Handler {
 // digit can be read from the input.
 func MatchHexDigit() Handler {
     return func(t *API) bool {
-        b, err := t.PeekByte(0)
+        b, err := t.Input.PeekByte(0)
         if err == nil && ((b >= '0' && b <= '9') || (b >= 'a' && b <= 'f') || (b >= 'A' && b <= 'F')) {
-            t.AcceptByte(b)
+            t.Input.AcceptByte(b)
             return true
         }
         return false
@@ -1556,7 +1556,7 @@ func ModifyDrop(handler Handler) Handler {
 func ModifyDropUntilEndOfLine() Handler {
     return func(t *API) bool {
         for {
-            b, err := t.PeekByte(0)
+            b, err := t.Input.PeekByte(0)
             if err != nil {
                 if err == io.EOF {
                     return true
@@ -1566,7 +1566,7 @@ func ModifyDropUntilEndOfLine() Handler {
             if b == '\n' {
                 return true
             }
-            t.SkipByte(b)
+            t.Input.SkipByte(b)
         }
     }
 }
@@ -43,7 +43,7 @@ func New(tokenHandler Handler) Func {
     ok := tokenHandler(api)

     if !ok {
-        err := fmt.Errorf("mismatch at %s", api.Cursor())
+        err := fmt.Errorf("mismatch at %s", api.Input.Cursor())
         return nil, err
     }
     result := &Result{
@@ -55,19 +55,19 @@ func ExampleNew() {

 func TestCallingPeekRune_PeeksRuneOnInput(t *testing.T) {
     api := makeTokenizeAPI()
-    r, _, _ := api.PeekRune(0)
+    r, _, _ := api.Input.PeekRune(0)
     AssertEqual(t, 'T', r, "first rune")
 }

 func TestInputCanAcceptRunesFromReader(t *testing.T) {
     i := makeTokenizeAPI()

-    r0, _, _ := i.PeekRune(0)
-    i.AcceptRune(r0)
+    r0, _, _ := i.Input.PeekRune(0)
+    i.Input.AcceptRune(r0)

-    r1, _, _ := i.PeekRune(0) // 0, because read offset resets to 0 after Accept* calls.
-    r2, _, _ := i.PeekRune(1)
-    i.AcceptRunes(r1, r2)
+    r1, _, _ := i.Input.PeekRune(0) // 0, because read offset resets to 0 after Accept* calls.
+    r2, _, _ := i.Input.PeekRune(1)
+    i.Input.AcceptRunes(r1, r2)

     AssertEqual(t, "Tes", i.Output.String(), "i.String()")
 }
@@ -134,29 +134,29 @@ func TestCallingForkOnForkedParentAPI_Panics(t *testing.T) {

 func TestAccept_UpdatesCursor(t *testing.T) {
     i := tokenize.NewAPI(strings.NewReader("input\r\nwith\r\nnewlines"))
-    AssertEqual(t, "start of file", i.Cursor(), "cursor 1")
+    AssertEqual(t, "start of file", i.Input.Cursor(), "cursor 1")
     for j := 0; j < 6; j++ { // read "input\r", cursor end up at "\n"
-        r, _, _ := i.PeekRune(0)
-        i.AcceptRune(r)
+        r, _, _ := i.Input.PeekRune(0)
+        i.Input.AcceptRune(r)
     }
-    AssertEqual(t, "line 1, column 7", i.Cursor(), "cursor 2")
+    AssertEqual(t, "line 1, column 7", i.Input.Cursor(), "cursor 2")

-    r, _, _ := i.PeekRune(0) // read "\n", cursor ends up at start of new line
-    i.AcceptRune(r)
-    AssertEqual(t, "line 2, column 1", i.Cursor(), "cursor 3")
+    r, _, _ := i.Input.PeekRune(0) // read "\n", cursor ends up at start of new line
+    i.Input.AcceptRune(r)
+    AssertEqual(t, "line 2, column 1", i.Input.Cursor(), "cursor 3")

     for j := 0; j < 10; j++ { // read "with\r\nnewl", cursor end up at "i"
-        b, _ := i.PeekByte(0)
-        i.AcceptByte(b)
+        b, _ := i.Input.PeekByte(0)
+        i.Input.AcceptByte(b)
     }
-    AssertEqual(t, "line 3, column 5", i.Cursor(), "cursor 4")
+    AssertEqual(t, "line 3, column 5", i.Input.Cursor(), "cursor 4")
 }

 func TestWhenCallingPeekruneAtEndOfFile_EOFIsReturned(t *testing.T) {
     i := tokenize.NewAPI(strings.NewReader("X"))
-    r, _, _ := i.PeekRune(0)
-    i.AcceptRune(r)
-    r, _, err := i.PeekRune(0)
+    r, _, _ := i.Input.PeekRune(0)
+    i.Input.AcceptRune(r)
+    r, _, err := i.Input.PeekRune(0)

     AssertEqual(t, true, r == utf8.RuneError, "returned rune from NextRune()")
     AssertEqual(t, true, err == io.EOF, "returned error from NextRune()")
@@ -167,9 +167,9 @@ func TestAfterReadingruneAtEndOfFile_EarlierRunesCanStillBeAccessed(t *testing.T
     child := i.Fork()

     // To to the EOF.
-    r, _, _ := i.PeekRune(0)
-    i.AcceptRune(r)
-    r, _, err := i.PeekRune(0)
+    r, _, _ := i.Input.PeekRune(0)
+    i.Input.AcceptRune(r)
+    r, _, err := i.Input.PeekRune(0)
     AssertEqual(t, true, r == utf8.RuneError, "returned rune from 2nd NextRune()")
     AssertEqual(t, true, err == io.EOF, "returned error from 2nd NextRune()")
@@ -177,7 +177,7 @@ func TestAfterReadingruneAtEndOfFile_EarlierRunesCanStillBeAccessed(t *testing.T
     i.Dispose(child)

     // So here we should see the same input data as before.
-    r, _, err = i.PeekRune(0)
+    r, _, err = i.Input.PeekRune(0)
     AssertEqual(t, 'X', r, "returned rune from 2nd NextRune()")
     AssertEqual(t, true, err == nil, "returned error from 2nd NextRune()")
 }
@@ -7,8 +7,8 @@ import (
 func TestFork_CreatesForkOfInputAtSameCursorPosition(t *testing.T) {
     // Create input, accept the first rune.
     i := NewAPI("Testing")
-    r, _, _ := i.PeekRune(0)
-    i.AcceptRune(r) // T
+    r, _, _ := i.Input.PeekRune(0)
+    i.Input.AcceptRune(r) // T
     AssertEqual(t, "T", i.Output.String(), "accepted rune in input")

     // Fork
|
||||||
AssertEqual(t, 1, i.stackFrame.offset, "child offset")
|
AssertEqual(t, 1, i.stackFrame.offset, "child offset")
|
||||||
|
|
||||||
// Accept two runes via fork.
|
// Accept two runes via fork.
|
||||||
r, _, _ = i.PeekRune(0)
|
r, _, _ = i.Input.PeekRune(0)
|
||||||
i.AcceptRune(r) // e
|
i.Input.AcceptRune(r) // e
|
||||||
r, _, _ = i.PeekRune(0)
|
r, _, _ = i.Input.PeekRune(0)
|
||||||
i.AcceptRune(r) // s
|
i.Input.AcceptRune(r) // s
|
||||||
AssertEqual(t, "es", i.Output.String(), "result runes in fork")
|
AssertEqual(t, "es", i.Output.String(), "result runes in fork")
|
||||||
AssertEqual(t, 1, i.stackFrames[i.stackLevel-1].offset, "parent offset")
|
AssertEqual(t, 1, i.stackFrames[i.stackLevel-1].offset, "parent offset")
|
||||||
AssertEqual(t, 3, i.stackFrame.offset, "child offset")
|
AssertEqual(t, 3, i.stackFrame.offset, "child offset")
|
||||||
|
@@ -34,16 +34,16 @@ func TestFork_CreatesForkOfInputAtSameCursorPosition(t *testing.T) {

 func TestGivenForkedChildWhichAcceptedRune_AfterMerging_RuneEndsUpInParentResult(t *testing.T) {
     i := NewAPI("Testing")
-    r, _, _ := i.PeekRune(0)
-    i.AcceptRune(r) // T
+    r, _, _ := i.Input.PeekRune(0)
+    i.Input.AcceptRune(r) // T

     f1 := i.Fork()
-    r, _, _ = i.PeekRune(0)
-    i.AcceptRune(r) // e
+    r, _, _ = i.Input.PeekRune(0)
+    i.Input.AcceptRune(r) // e

     f2 := i.Fork()
-    r, _, _ = i.PeekRune(0)
-    i.AcceptRune(r) // s
+    r, _, _ = i.Input.PeekRune(0)
+    i.Input.AcceptRune(r) // s
     AssertEqual(t, "s", i.Output.String(), "f2 String()")
     AssertEqual(t, 3, i.stackFrame.offset, "f2.offset A")
@@ -63,25 +63,25 @@ func TestFlushInput(t *testing.T) {

     // Flushing without any read data is okay. FlushInput() will return
     // false in this case, and nothing else happens.
-    AssertTrue(t, i.FlushInput() == false, "flush input at start")
+    AssertTrue(t, i.Input.Flush() == false, "flush input at start")

-    r, _, _ := i.PeekRune(0)
-    i.AcceptRune(r) // c
-    r, _, _ = i.PeekRune(0)
-    i.AcceptRune(r) // o
+    r, _, _ := i.Input.PeekRune(0)
+    i.Input.AcceptRune(r) // c
+    r, _, _ = i.Input.PeekRune(0)
+    i.Input.AcceptRune(r) // o

-    AssertTrue(t, i.FlushInput() == true, "flush input after reading some data")
+    AssertTrue(t, i.Input.Flush() == true, "flush input after reading some data")
     AssertEqual(t, 0, i.stackFrame.offset, "offset after flush input")

-    AssertTrue(t, i.FlushInput() == false, "flush input after flush input")
+    AssertTrue(t, i.Input.Flush() == false, "flush input after flush input")

     // Read offset is now zero, but reading should continue after "co".
     // The output so far isn't modified, so the following accept calls
     // will add their runes to the already accepted string "co".
-    r, _, _ = i.PeekRune(0)
-    i.AcceptRune(r) // o
-    r, _, _ = i.PeekRune(0)
-    i.AcceptRune(r) // o
+    r, _, _ = i.Input.PeekRune(0)
+    i.Input.AcceptRune(r) // o
+    r, _, _ = i.Input.PeekRune(0)
+    i.Input.AcceptRune(r) // o

     AssertEqual(t, "cool", i.Output.String(), "end result")
 }