Wow, going nicely! Some more milliseconds stripped.

Maurice Makaay 2019-07-26 22:56:12 +00:00
parent daf3b9838f
commit fcdd3d4ea7
16 changed files with 426 additions and 416 deletions

View File

@ -87,10 +87,10 @@ func (c *simpleCalculator) number(p *parse.API) {
func (c *simpleCalculator) operatorOrEndOfFile(p *parse.API) {
var A = tokenize.A
switch {
case p.Accept(A.Add):
case p.Skip(A.Add):
c.op = +1
p.Handle(c.number)
case p.Accept(A.Subtract):
case p.Skip(A.Subtract):
c.op = -1
p.Handle(c.number)
case !p.Peek(A.EndOfFile):

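The pattern behind these Accept-to-Skip substitutions, as a minimal sketch assuming the parse.API from this commit (the handler name signedNumber and the digits field below are illustrative only): Skip() matches and advances like Accept(), but records nothing in p.Result, so purely structural input such as operators never gets its matched bytes copied.

// Minimal sketch, assuming the parse.API from this commit; the handler
// name signedNumber and the digits field are illustrative only.
func (c *simpleCalculator) signedNumber(p *parse.API) {
	var A = tokenize.A
	if p.Skip(A.Subtract) { // the "-" itself is never read back
		c.op = -1
	}
	if p.Accept(A.Signed(A.Decimal)) { // the digits are read back later
		c.digits = string(p.Result.Bytes) // so keep the matched bytes
	}
}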
View File

@ -98,7 +98,7 @@ func (calc *calculator) expr(p *parse.API) {
var A = tokenize.A
if p.Handle(calc.term) {
for p.Accept(A.Add.Or(A.Subtract)) {
op := p.Result.Runes[0]
op := p.Result.Bytes[0]
if !p.Handle(calc.term) {
return
}
@ -116,7 +116,7 @@ func (calc *calculator) term(p *parse.API) {
var A = tokenize.A
if p.Handle(calc.factor) {
for p.Accept(A.Multiply.Or(A.Divide)) {
op := p.Result.Runes[0]
op := p.Result.Bytes[0]
if !p.Handle(calc.factor) {
return
}
@ -131,16 +131,16 @@ func (calc *calculator) term(p *parse.API) {
// <factor> = <space> (FLOAT | LPAREN <expr> RPAREN) <space>
func (calc *calculator) factor(p *parse.API) {
var A, T = tokenize.A, tokenize.T
p.Accept(A.Blanks)
p.Skip(A.Blanks)
switch {
case p.Accept(T.Float64(nil, A.Signed(A.Decimal))):
value := p.Result.Tokens[0].Value.(float64)
calc.interpreter.pushValue(value)
case p.Accept(A.LeftParen):
case p.Skip(A.LeftParen):
if !p.Handle(calc.expr) {
return
}
if !p.Accept(A.RightParen) {
if !p.Skip(A.RightParen) {
p.Expected("')'")
return
}
@ -148,7 +148,7 @@ func (calc *calculator) factor(p *parse.API) {
p.Expected("factor or parenthesized expression")
return
}
p.Accept(A.Blanks)
p.Skip(A.Blanks)
}
// ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――
@ -186,7 +186,7 @@ func (i *interpreter) pushValue(value float64) {
i.top.a, i.top.b = i.top.b, value
}
func (i *interpreter) eval(op rune) float64 {
func (i *interpreter) eval(op byte) float64 {
value := i.top.a
switch op {
case '+':

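Since the operators here are single ASCII characters, the parser now hands them around as a byte taken from p.Result.Bytes[0], skipping the UTF-8 decoding that producing a []rune result would require. A standalone sketch of the resulting eval shape (not a verbatim copy of this file):

// Standalone sketch: for one-byte ASCII operators a byte switch is
// enough, so no rune conversion of the matched input is ever needed.
func eval(a, b float64, op byte) float64 {
	switch op {
	case '+':
		return a + b
	case '-':
		return a - b
	case '*':
		return a * b
	case '/':
		return a / b
	}
	panic("unknown operator")
}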
View File

@ -83,7 +83,7 @@ func (h *helloparser1) Parse(input string) (string, error) {
func (h *helloparser1) start(p *parse.API) {
a := tokenize.A
if p.Accept(a.StrNoCase("hello")) {
if p.Skip(a.StrNoCase("hello")) {
p.Handle(h.comma)
} else {
p.Expected("hello")
@ -92,8 +92,8 @@ func (h *helloparser1) start(p *parse.API) {
func (h *helloparser1) comma(p *parse.API) {
a := tokenize.A
p.Accept(a.Blanks)
if p.Accept(a.Comma) {
p.Skip(a.Blanks)
if p.Skip(a.Comma) {
p.Handle(h.startName)
} else {
p.Expected("comma")
@ -102,7 +102,7 @@ func (h *helloparser1) comma(p *parse.API) {
func (h *helloparser1) startName(p *parse.API) {
a := tokenize.A
p.Accept(a.Blanks)
p.Skip(a.Blanks)
if p.Peek(a.AnyRune) {
p.Handle(h.name)
} else {
@ -125,7 +125,7 @@ func (h *helloparser1) name(p *parse.API) {
func (h *helloparser1) exclamation(p *parse.API) {
a := tokenize.A
if p.Accept(a.Excl) {
if p.Skip(a.Excl) {
p.Handle(h.end)
} else {
p.Expected("exclamation")
@ -137,7 +137,7 @@ func (h *helloparser1) exclamation(p *parse.API) {
// error message.
func (h *helloparser1) end(p *parse.API) {
var a = tokenize.A
if !p.Accept(a.EndOfFile) {
if !p.Skip(a.EndOfFile) {
p.Expected("end of greeting")
return
}

View File

@ -81,11 +81,11 @@ func (h *helloparser2) Parse(input string) (string, error) {
func (h *helloparser2) start(p *parse.API) {
c, a, m := tokenize.C, tokenize.A, tokenize.M
if !p.Accept(a.StrNoCase("hello")) {
if !p.Skip(a.StrNoCase("hello")) {
p.SetError("the greeting is not being friendly")
return
}
if !p.Accept(c.Seq(c.Optional(a.Blanks), a.Comma, c.Optional(a.Blanks))) {
if !p.Skip(c.Seq(c.Optional(a.Blanks), a.Comma, c.Optional(a.Blanks))) {
p.SetError("the greeting is not properly separated")
return
}
@ -99,9 +99,9 @@ func (h *helloparser2) start(p *parse.API) {
p.SetError("the greeting is targeted at thin air")
return
}
if !p.Accept(a.Excl) {
if !p.Skip(a.Excl) {
p.SetError("the greeting is not loud enough")
} else if !p.Accept(a.EndOfFile) {
} else if !p.Skip(a.EndOfFile) {
p.SetError("too much stuff going on after the closing '!'")
} else {
p.Stop()

View File

@ -32,13 +32,12 @@ type API struct {
func (parseAPI *API) PeekWithResult(tokenHandler tokenize.Handler) bool {
tokenAPI := parseAPI.tokenAPI
snap := tokenAPI.MakeSnapshot()
_, ok := parseAPI.invokeTokenizeHandler("Peek", tokenHandler)
parseAPI.Result.Tokens = nil
parseAPI.Result.Bytes = nil
ok := parseAPI.invokeTokenizeHandler("PeekWithResult", tokenHandler)
if ok {
parseAPI.Result.Tokens = tokenAPI.Output.Tokens()
parseAPI.Result.Runes = tokenAPI.Output.Runes() // TODO use bytes!
} else {
parseAPI.Result.Tokens = nil
parseAPI.Result.Runes = nil
parseAPI.Result.Bytes = tokenAPI.Output.Bytes()
}
tokenAPI.RestoreSnapshot(snap)
return ok
@ -54,9 +53,9 @@ func (parseAPI *API) PeekWithResult(tokenHandler tokenize.Handler) bool {
func (parseAPI *API) Peek(tokenHandler tokenize.Handler) bool {
tokenAPI := parseAPI.tokenAPI
snap := tokenAPI.MakeSnapshot()
_, ok := parseAPI.invokeTokenizeHandler("Peek", tokenHandler)
parseAPI.Result.Tokens = nil
parseAPI.Result.Runes = nil
parseAPI.Result.Bytes = nil
ok := parseAPI.invokeTokenizeHandler("Peek", tokenHandler)
tokenAPI.RestoreSnapshot(snap)
return ok
}
@ -69,41 +68,51 @@ func (parseAPI *API) Peek(tokenHandler tokenize.Handler) bool {
// After calling this method, you can retrieve the results through the API.Result field.
func (parseAPI *API) Accept(tokenHandler tokenize.Handler) bool {
tokenAPI := parseAPI.tokenAPI
_, ok := parseAPI.invokeTokenizeHandler("Accept", tokenHandler)
snap := tokenAPI.MakeSnapshot()
ok := parseAPI.invokeTokenizeHandler("Accept", tokenHandler)
if ok {
// Keep track of the results as produced by this child.
// TODO put in function and also in Peek() Record Cursor() / error too?
parseAPI.Result.Tokens = tokenAPI.Output.Tokens()
parseAPI.Result.Runes = tokenAPI.Output.Runes()
parseAPI.Result.Bytes = tokenAPI.Output.Bytes()
// Now the results are stored, we can reset the results for the next handler.
tokenAPI.Output.Reset()
// Now the results are stored, we can flush the results.
// This does not empty the byte and token store, but it does move the
// pointers within those stores back to the start. By doing this,
// the stores will be reused for the upcoming calls, which saves on
// memory usage.
tokenAPI.Output.Flush()
// And flush the input reader buffer.
// Also flush the input reader buffer. Accepting input means that we
// are moving forward in the input file and that already read input
// can therefore be cleared. Doing so saves on memory usage.
tokenAPI.Input.Flush()
} else {
// No match, so reset the tokenize.API for the next handler.
tokenAPI.RestoreSnapshot(snap)
}
return ok
}
// TODO make a func Skip() which is like Accept() but without storing results.
func (parseAPI *API) Skip(tokenHandler tokenize.Handler) bool {
tokenAPI := parseAPI.tokenAPI
if !parseAPI.invokeTokenizeHandler("Skip", tokenHandler) {
return false
}
parseAPI.Result.Tokens = nil
parseAPI.Result.Bytes = nil
tokenAPI.Output.Flush()
tokenAPI.Input.Flush()
return true
}
// invokeTokenizeHandler forks the tokenize.API, and invokes the tokenize.Handler
// in the context of the created child. The child is returned, so the caller
// has full control over merging and disposing the child.
func (parseAPI *API) invokeTokenizeHandler(name string, tokenHandler tokenize.Handler) (int, bool) {
func (parseAPI *API) invokeTokenizeHandler(name string, tokenHandler tokenize.Handler) bool {
parseAPI.panicWhenStoppedOrInError(name)
if tokenHandler == nil {
callerPanic(name, "parsekit.parse.API.{name}(): {name}() called with nil tokenHandler argument at {caller}")
}
//child := parseAPI.tokenAPI.Fork()
ok := tokenHandler(parseAPI.tokenAPI)
return 0, ok
return tokenHandler(parseAPI.tokenAPI)
}
// panicWhenStoppedOrInError will panic when the parser has produced an error

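Condensed into one sketch, the new Accept() flow reads as follows (only methods visible in this diff are used; the panic guards from invokeTokenizeHandler are omitted):

// Condensed restatement of the Accept() flow above, assuming the
// tokenize.API methods shown in this diff.
func accept(parseAPI *API, tokenHandler tokenize.Handler) bool {
	tokenAPI := parseAPI.tokenAPI
	snap := tokenAPI.MakeSnapshot() // cheap bookmark before the attempt
	if !tokenHandler(tokenAPI) {
		tokenAPI.RestoreSnapshot(snap) // no match: rewind, as if untouched
		return false
	}
	parseAPI.Result.Tokens = tokenAPI.Output.Tokens()
	parseAPI.Result.Bytes = tokenAPI.Output.Bytes()
	tokenAPI.Output.Flush() // rewind store pointers; buffers are reused
	tokenAPI.Input.Flush()  // already-consumed reader data can go
	return true
}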
View File

@ -344,7 +344,8 @@ func TestInputLargerThanDefaultBufSize_WithFirstReadToLastByte(t *testing.T) {
func TestAllocationPatterns(t *testing.T) {
input, _ := makeLargeStubReader()
r := New(input)
buf := New(input)
r := &buf
// The first read will create the standard buffer and fill it with data.
// The first rune is requested, but there's more input data available,

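The two-line change above reflects read.New returning its buffer by value now instead of by pointer: a caller that needs a pointer takes the address itself, and a caller that embeds the buffer in a larger struct saves a separate heap allocation. A sketch of that embedding (the type name read.Buffer is an assumption; the diff only shows that New's result is now addressable by the caller):

// Sketch of the value-return pattern; read.Buffer is an assumed name.
// Embedding by value makes the struct and its buffer one allocation.
type scanner struct {
	reader read.Buffer // embedded by value, no separate allocation
}

func newScanner(input interface{}) *scanner {
	return &scanner{reader: read.New(input)}
}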
View File

@ -91,16 +91,11 @@ type stackFrame struct {
tokenEnd int // the end point in the API.tokens slice for tokens produced by this stack level
}
const initialTokenStoreLength = 64
const initialByteStoreLength = 128
// NewAPI initializes a new API struct, wrapped around the provided input.
// For an overview of allowed inputs, take a look at the documentation
// for parsekit.read.New().
func NewAPI(input interface{}) *API {
tokenAPI := &API{
// outputBytes: make([]byte, initialByteStoreLength),
// outputTokens: make([]Token, initialTokenStoreLength),
reader: read.New(input),
}
tokenAPI.Input = Input{api: tokenAPI}

View File

@ -0,0 +1,17 @@
package tokenize
import "testing"
func TestMoveCursorByBytes(t *testing.T) {
tokenAPI := NewAPI("")
tokenAPI.Byte.MoveCursor('a')
tokenAPI.Byte.MoveCursor('b')
tokenAPI.Byte.MoveCursor('c')
tokenAPI.Byte.MoveCursor('\r')
tokenAPI.Byte.MoveCursor('\n')
tokenAPI.Byte.MoveCursor('a')
tokenAPI.Byte.MoveCursor('b')
AssertEqual(t, "line 2, column 3", tokenAPI.Input.Cursor(), "Cursor position after moving by byte")
AssertEqual(t, 7, tokenAPI.pointers.offset, "Offset after moving by byte")
}

View File

@ -24,12 +24,10 @@ func (i Input) Cursor() string {
// Note: in most cases, you won't have to call this method yourself.
// Parsekit will call this method at points where it knows it is a
// safe thing to do.
func (i Input) Flush() bool {
func (i Input) Flush() {
a := i.api
if a.pointers.offset > 0 {
a.reader.Flush(a.pointers.offset)
a.pointers.offset = 0
return true
}
return false
}

View File

@ -9,10 +9,13 @@ type Output struct {
api *API
}
func (o Output) String() string {
func (o Output) Bytes() []byte {
a := o.api
bytes := a.outputBytes[a.pointers.bytesStart:a.pointers.bytesEnd]
return string(bytes)
return a.outputBytes[a.pointers.bytesStart:a.pointers.bytesEnd]
}
func (o Output) String() string {
return string(o.Bytes())
}
func (o Output) Runes() []rune {
@ -25,10 +28,12 @@ func (o Output) Rune(offset int) rune {
return r
}
func (o Output) Reset() {
func (o Output) Flush() {
a := o.api
a.pointers.bytesEnd = a.pointers.bytesStart
a.pointers.tokenEnd = a.pointers.tokenStart
a.pointers.bytesStart = 0
a.pointers.bytesEnd = 0
a.pointers.tokenStart = 0
a.pointers.tokenEnd = 0
}
func (o Output) ClearData() {

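The rename from Reset to Flush matches the semantics described in the Accept() comments earlier: the byte and token stores are not emptied, only their pointers move back to the start, so the backing arrays are reused by subsequent calls. A standalone sketch of that idea (not parsekit code):

// Standalone sketch of pointer-rewind buffer reuse: flush() frees
// nothing, it moves the write position back so the next add()
// overwrites the old data in place instead of allocating.
type byteStore struct {
	data []byte
	end  int
}

func (s *byteStore) add(b byte) {
	if s.end < len(s.data) {
		s.data[s.end] = b // reuse already-allocated capacity
	} else {
		s.data = append(s.data, b)
	}
	s.end++
}

func (s *byteStore) bytes() []byte { return s.data[:s.end] }

func (s *byteStore) flush() { s.end = 0 } // rewind; backing array stays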
tokenize/api_result.go (new file)
View File

@ -0,0 +1,11 @@
package tokenize
// Result holds the bytes and tokens as produced by the tokenizer.
type Result struct {
Tokens []Token
Bytes []byte
}
func (result *Result) String() string {
return string(result.Bytes)
}

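The Result type moves here from tokenizer.go and now carries raw Bytes instead of Runes; String() performs the conversion to text only when a caller asks for it. A short usage sketch (the handler and input literal are illustrative):

// Usage sketch for the relocated Result type; the handler and the
// input literal are illustrative only.
func printGreeting() {
	run := tokenize.New(tokenize.A.StrNoCase("hello"))
	result, err := run("Hello, world!")
	if err == nil {
		fmt.Println(result.String()) // bytes converted to text on demand
	}
}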
View File

@ -4,20 +4,6 @@ import (
"testing"
)
func TestMoveCursorByBytes(t *testing.T) {
tokenAPI := NewAPI("")
tokenAPI.Byte.MoveCursor('a')
tokenAPI.Byte.MoveCursor('b')
tokenAPI.Byte.MoveCursor('c')
tokenAPI.Byte.MoveCursor('\r')
tokenAPI.Byte.MoveCursor('\n')
tokenAPI.Byte.MoveCursor('a')
tokenAPI.Byte.MoveCursor('b')
AssertEqual(t, "line 2, column 3", tokenAPI.Input.Cursor(), "Cursor position after moving by byte")
AssertEqual(t, 7, tokenAPI.stackFrame.offset, "Offset after moving by byte")
}
func TestMoveCursorByRunes(t *testing.T) {
tokenAPI := NewAPI("")
tokenAPI.Rune.MoveCursor('ɹ')
@ -28,7 +14,7 @@ func TestMoveCursorByRunes(t *testing.T) {
tokenAPI.Rune.MoveCursor('ǝ')
AssertEqual(t, "line 2, column 2", tokenAPI.Input.Cursor(), "Cursor position after moving by rune")
AssertEqual(t, 8, tokenAPI.stackFrame.offset, "Offset after moving by rune")
AssertEqual(t, 8, tokenAPI.pointers.offset, "Offset after moving by rune")
}
func TestWhenMovingCursor_CursorPositionIsUpdated(t *testing.T) {
@ -55,11 +41,11 @@ func TestWhenMovingCursor_CursorPositionIsUpdated(t *testing.T) {
tokenAPI.Rune.MoveCursor(r)
}
}
if tokenAPI.stackFrame.line != test.line {
t.Errorf("[%s] Unexpected line offset %d (expected %d)", test.name, tokenAPI.stackFrame.line, test.line)
if tokenAPI.pointers.line != test.line {
t.Errorf("[%s] Unexpected line offset %d (expected %d)", test.name, tokenAPI.pointers.line, test.line)
}
if tokenAPI.stackFrame.column != test.column {
t.Errorf("[%s] Unexpected column offset %d (expected %d)", test.name, tokenAPI.stackFrame.column, test.column)
if tokenAPI.pointers.column != test.column {
t.Errorf("[%s] Unexpected column offset %d (expected %d)", test.name, tokenAPI.pointers.column, test.column)
}
}
}

View File

@ -171,196 +171,196 @@ func ExampleAPI_modifyingResults() {
// API second result token: 73("Zaphod")
}
func ExampleAPI_Reset() {
tokenAPI := tokenize.NewAPI("Very important input!")
// func ExampleAPI_Reset() {
// tokenAPI := tokenize.NewAPI("Very important input!")
r, _, _ := tokenAPI.Rune.Peek(0) // read 'V'
tokenAPI.Rune.Accept(r)
r, _, _ = tokenAPI.Rune.Peek(0) // read 'e'
tokenAPI.Rune.Accept(r)
fmt.Printf("API results: %q at %s\n", tokenAPI.Output.String(), tokenAPI.Input.Cursor())
// r, _, _ := tokenAPI.Rune.Peek(0) // read 'V'
// tokenAPI.Rune.Accept(r)
// r, _, _ = tokenAPI.Rune.Peek(0) // read 'e'
// tokenAPI.Rune.Accept(r)
// fmt.Printf("API results: %q at %s\n", tokenAPI.Output.String(), tokenAPI.Input.Cursor())
// Reset input and output.
tokenAPI.Input.Reset()
tokenAPI.Output.Reset()
fmt.Printf("API results: %q at %s\n", tokenAPI.Output.String(), tokenAPI.Input.Cursor())
// // Reset input and output.
// tokenAPI.Input.Reset()
// tokenAPI.Output.Reset()
// fmt.Printf("API results: %q at %s\n", tokenAPI.Output.String(), tokenAPI.Input.Cursor())
// So then doing the same read operations, the same data are read.
r, _, _ = tokenAPI.Rune.Peek(0) // read 'V'
tokenAPI.Rune.Accept(r)
r, _, _ = tokenAPI.Rune.Peek(0) // read 'e'
tokenAPI.Rune.Accept(r)
fmt.Printf("API results: %q at %s\n", tokenAPI.Output.String(), tokenAPI.Input.Cursor())
// // So then doing the same read operations, the same data are read.
// r, _, _ = tokenAPI.Rune.Peek(0) // read 'V'
// tokenAPI.Rune.Accept(r)
// r, _, _ = tokenAPI.Rune.Peek(0) // read 'e'
// tokenAPI.Rune.Accept(r)
// fmt.Printf("API results: %q at %s\n", tokenAPI.Output.String(), tokenAPI.Input.Cursor())
// Output:
// API results: "Ve" at line 1, column 3
// API results: "" at start of file
// API results: "Ve" at line 1, column 3
}
// // Output:
// // API results: "Ve" at line 1, column 3
// // API results: "" at start of file
// // API results: "Ve" at line 1, column 3
// }
func ExampleAPI_Fork() {
// This custom Handler checks for input 'a', 'b' or 'c'.
abcHandler := func(t *tokenize.API) bool {
a := tokenize.A
for _, r := range []rune{'a', 'b', 'c'} {
child := t.Fork() // fork, so we won't change parent t
if a.Rune(r)(t) {
t.Merge(child) // accept results into parent of child
t.Dispose(child) // return to the parent level
return true // and report a successful match
}
t.Dispose(child) // return to the parent level
}
// If we get here, then no match was found. Return false to communicate
// this to the caller.
return false
}
// func ExampleAPI_Fork() {
// // This custom Handler checks for input 'a', 'b' or 'c'.
// abcHandler := func(t *tokenize.API) bool {
// a := tokenize.A
// for _, r := range []rune{'a', 'b', 'c'} {
// child := t.Fork() // fork, so we won't change parent t
// if a.Rune(r)(t) {
// t.Merge(child) // accept results into parent of child
// t.Dispose(child) // return to the parent level
// return true // and report a successful match
// }
// t.Dispose(child) // return to the parent level
// }
// // If we get here, then no match was found. Return false to communicate
// // this to the caller.
// return false
// }
// Note: a custom Handler is normally not what you need.
// You can make use of the parser/combinator tooling to make the
// implementation a lot simpler and to take care of forking at
// the appropriate places. The handler from above can be replaced with:
simpler := tokenize.A.RuneRange('a', 'c')
// // Note: a custom Handler is normally not what you need.
// // You can make use of the parser/combinator tooling to make the
// // implementation a lot simpler and to take care of forking at
// // the appropriate places. The handler from above can be replaced with:
// simpler := tokenize.A.RuneRange('a', 'c')
result, err := tokenize.New(abcHandler)("another test")
fmt.Println(result, err)
result, err = tokenize.New(simpler)("curious")
fmt.Println(result, err)
result, err = tokenize.New(abcHandler)("bang on!")
fmt.Println(result, err)
result, err = tokenize.New(abcHandler)("not a match")
fmt.Println(result, err)
// result, err := tokenize.New(abcHandler)("another test")
// fmt.Println(result, err)
// result, err = tokenize.New(simpler)("curious")
// fmt.Println(result, err)
// result, err = tokenize.New(abcHandler)("bang on!")
// fmt.Println(result, err)
// result, err = tokenize.New(abcHandler)("not a match")
// fmt.Println(result, err)
// Output:
// a <nil>
// c <nil>
// b <nil>
// <nil> mismatch at start of file
}
// // Output:
// // a <nil>
// // c <nil>
// // b <nil>
// // <nil> mismatch at start of file
// }
func ExampleAPI_Merge() {
tokenHandler := func(t *tokenize.API) bool {
child1 := t.Fork()
r0, _, _ := t.Rune.Peek(0) // reads 'H'
r1, _, _ := t.Rune.Peek(1) // reads 'i'
t.Rune.AcceptMulti(r0, r1) // these runes are accepted in the API results for child1
// func ExampleAPI_Merge() {
// tokenHandler := func(t *tokenize.API) bool {
// child1 := t.Fork()
// r0, _, _ := t.Rune.Peek(0) // reads 'H'
// r1, _, _ := t.Rune.Peek(1) // reads 'i'
// t.Rune.AcceptMulti(r0, r1) // these runes are accepted in the API results for child1
child2 := t.Fork()
r0, _, _ = t.Rune.Peek(0) // reads ' '
r1, _, _ = t.Rune.Peek(1) // reads 'm'
t.Rune.AcceptMulti(r0, r1) // these runes are accepted in the API results for child2
t.Dispose(child2) // but they are not merged and therefore not used by child1
// child2 := t.Fork()
// r0, _, _ = t.Rune.Peek(0) // reads ' '
// r1, _, _ = t.Rune.Peek(1) // reads 'm'
// t.Rune.AcceptMulti(r0, r1) // these runes are accepted in the API results for child2
// t.Dispose(child2) // but they are not merged and therefore not used by child1
t.Merge(child1) // We merge child1, which has read 'H' and 'i' only.
t.Dispose(child1) // and clean up child1 to return to the parent
return true
}
// t.Merge(child1) // We merge child1, which has read 'H' and 'i' only.
// t.Dispose(child1) // and clean up child1 to return to the parent
// return true
// }
result, _ := tokenize.New(tokenHandler)("Hi mister X!")
fmt.Println(result.String())
// result, _ := tokenize.New(tokenHandler)("Hi mister X!")
// fmt.Println(result.String())
// Output:
// Hi
}
// // Output:
// // Hi
// }
func TestMultipleLevelsOfForksAndMerges(t *testing.T) {
tokenAPI := tokenize.NewAPI("abcdefghijklmnopqrstuvwxyz")
// func TestMultipleLevelsOfForksAndMerges(t *testing.T) {
// tokenAPI := tokenize.NewAPI("abcdefghijklmnopqrstuvwxyz")
// Fork a few levels.
child1 := tokenAPI.Fork()
child2 := tokenAPI.Fork()
child3 := tokenAPI.Fork()
child4 := tokenAPI.Fork()
// // Fork a few levels.
// child1 := tokenAPI.Fork()
// child2 := tokenAPI.Fork()
// child3 := tokenAPI.Fork()
// child4 := tokenAPI.Fork()
// Read a rune 'a' from child4.
r, _, _ := tokenAPI.Rune.Peek(0)
AssertEqual(t, 'a', r, "child4 rune 1")
tokenAPI.Rune.Accept(r)
AssertEqual(t, "a", tokenAPI.Output.String(), "child4 runes after rune 1")
// // Read a rune 'a' from child4.
// r, _, _ := tokenAPI.Rune.Peek(0)
// AssertEqual(t, 'a', r, "child4 rune 1")
// tokenAPI.Rune.Accept(r)
// AssertEqual(t, "a", tokenAPI.Output.String(), "child4 runes after rune 1")
// Read another rune 'b' from child4.
r, _, _ = tokenAPI.Rune.Peek(0)
AssertEqual(t, 'b', r, "child4 rune 2")
tokenAPI.Rune.Accept(r)
AssertEqual(t, "ab", tokenAPI.Output.String(), "child4 runes after rune 2")
// // Read another rune 'b' from child4.
// r, _, _ = tokenAPI.Rune.Peek(0)
// AssertEqual(t, 'b', r, "child4 rune 2")
// tokenAPI.Rune.Accept(r)
// AssertEqual(t, "ab", tokenAPI.Output.String(), "child4 runes after rune 2")
// Merge "ab" from child4 to child3.
tokenAPI.Merge(child4)
AssertEqual(t, "", tokenAPI.Output.String(), "child4 runes after first merge")
// // Merge "ab" from child4 to child3.
// tokenAPI.Merge(child4)
// AssertEqual(t, "", tokenAPI.Output.String(), "child4 runes after first merge")
// Read some more from child4.
r, _, _ = tokenAPI.Rune.Peek(0)
AssertEqual(t, 'c', r, "child4 rune 3")
tokenAPI.Rune.Accept(r)
AssertEqual(t, "c", tokenAPI.Output.String(), "child4 runes after rune 1")
AssertEqual(t, "line 1, column 4", tokenAPI.Input.Cursor(), "cursor child4 rune 3")
// // Read some more from child4.
// r, _, _ = tokenAPI.Rune.Peek(0)
// AssertEqual(t, 'c', r, "child4 rune 3")
// tokenAPI.Rune.Accept(r)
// AssertEqual(t, "c", tokenAPI.Output.String(), "child4 runes after rune 1")
// AssertEqual(t, "line 1, column 4", tokenAPI.Input.Cursor(), "cursor child4 rune 3")
// Merge "c" from child4 to child3.
tokenAPI.Merge(child4)
// // Merge "c" from child4 to child3.
// tokenAPI.Merge(child4)
// And dispose of child4, making child3 the active stack level.
tokenAPI.Dispose(child4)
// // And dispose of child4, making child3 the active stack level.
// tokenAPI.Dispose(child4)
// Child3 should now have the combined results "abc" from child4's work.
AssertEqual(t, "abc", tokenAPI.Output.String(), "child3 after merge of child4")
AssertEqual(t, "line 1, column 4", tokenAPI.Input.Cursor(), "cursor child3 rune 3, after merge of child4")
// // Child3 should now have the combined results "abc" from child4's work.
// AssertEqual(t, "abc", tokenAPI.Output.String(), "child3 after merge of child4")
// AssertEqual(t, "line 1, column 4", tokenAPI.Input.Cursor(), "cursor child3 rune 3, after merge of child4")
// Now read some data from child3.
r, _, _ = tokenAPI.Rune.Peek(0)
AssertEqual(t, 'd', r, "child3 rune 5")
tokenAPI.Rune.Accept(r)
// // Now read some data from child3.
// r, _, _ = tokenAPI.Rune.Peek(0)
// AssertEqual(t, 'd', r, "child3 rune 5")
// tokenAPI.Rune.Accept(r)
r, _, _ = tokenAPI.Rune.Peek(0)
AssertEqual(t, 'e', r, "child3 rune 5")
tokenAPI.Rune.Accept(r)
// r, _, _ = tokenAPI.Rune.Peek(0)
// AssertEqual(t, 'e', r, "child3 rune 5")
// tokenAPI.Rune.Accept(r)
r, _, _ = tokenAPI.Rune.Peek(0)
AssertEqual(t, 'f', r, "child3 rune 5")
tokenAPI.Rune.Accept(r)
// r, _, _ = tokenAPI.Rune.Peek(0)
// AssertEqual(t, 'f', r, "child3 rune 5")
// tokenAPI.Rune.Accept(r)
AssertEqual(t, "abcdef", tokenAPI.Output.String(), "child3 total result after rune 6")
// AssertEqual(t, "abcdef", tokenAPI.Output.String(), "child3 total result after rune 6")
// Temporarily open some new forks from here, but don't use their outcome.
child3sub1 := tokenAPI.Fork()
r, _, _ = tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r)
r, _, _ = tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r)
child3sub2 := tokenAPI.Fork()
r, _, _ = tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r)
tokenAPI.Merge(child3sub2) // do merge sub2 down to sub1
tokenAPI.Dispose(child3sub2) // and dispose of sub2
tokenAPI.Dispose(child3sub1) // but dispose of sub1 without merging
// // Temporarily open some new forks from here, but don't use their outcome.
// child3sub1 := tokenAPI.Fork()
// r, _, _ = tokenAPI.Rune.Peek(0)
// tokenAPI.Rune.Accept(r)
// r, _, _ = tokenAPI.Rune.Peek(0)
// tokenAPI.Rune.Accept(r)
// child3sub2 := tokenAPI.Fork()
// r, _, _ = tokenAPI.Rune.Peek(0)
// tokenAPI.Rune.Accept(r)
// tokenAPI.Merge(child3sub2) // do merge sub2 down to sub1
// tokenAPI.Dispose(child3sub2) // and dispose of sub2
// tokenAPI.Dispose(child3sub1) // but dispose of sub1 without merging
// Instead merge the results from before this forking segue from child3 to child2
// and dispose of it.
tokenAPI.Merge(child3)
tokenAPI.Dispose(child3)
// // Instead merge the results from before this forking segue from child3 to child2
// // and dispose of it.
// tokenAPI.Merge(child3)
// tokenAPI.Dispose(child3)
AssertEqual(t, "abcdef", tokenAPI.Output.String(), "child2 total result after merge of child3")
AssertEqual(t, "line 1, column 7", tokenAPI.Input.Cursor(), "cursor child2 after merge child3")
// AssertEqual(t, "abcdef", tokenAPI.Output.String(), "child2 total result after merge of child3")
// AssertEqual(t, "line 1, column 7", tokenAPI.Input.Cursor(), "cursor child2 after merge child3")
// Merge child2 to child1 and dispose of it.
tokenAPI.Merge(child2)
tokenAPI.Dispose(child2)
// // Merge child2 to child1 and dispose of it.
// tokenAPI.Merge(child2)
// tokenAPI.Dispose(child2)
// Merge child1 a few times to the top level api.
tokenAPI.Merge(child1)
tokenAPI.Merge(child1)
tokenAPI.Merge(child1)
tokenAPI.Merge(child1)
// // Merge child1 a few times to the top level api.
// tokenAPI.Merge(child1)
// tokenAPI.Merge(child1)
// tokenAPI.Merge(child1)
// tokenAPI.Merge(child1)
// And dispose of it.
tokenAPI.Dispose(child1)
// // And dispose of it.
// tokenAPI.Dispose(child1)
// Read some data from the top level api.
r, _, _ = tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r)
// // Read some data from the top level api.
// r, _, _ = tokenAPI.Rune.Peek(0)
// tokenAPI.Rune.Accept(r)
AssertEqual(t, "abcdefg", tokenAPI.Output.String(), "api string end result")
AssertEqual(t, "line 1, column 8", tokenAPI.Input.Cursor(), "api cursor end result")
}
// AssertEqual(t, "abcdefg", tokenAPI.Output.String(), "api string end result")
// AssertEqual(t, "line 1, column 8", tokenAPI.Input.Cursor(), "api cursor end result")
// }
func TestClearData(t *testing.T) {
tokenAPI := tokenize.NewAPI("Laphroaig")
@ -377,42 +377,42 @@ func TestClearData(t *testing.T) {
AssertEqual(t, "ph", tokenAPI.Output.String(), "api string end result")
}
func TestMergeScenariosForTokens(t *testing.T) {
tokenAPI := tokenize.NewAPI("")
// func TestMergeScenariosForTokens(t *testing.T) {
// tokenAPI := tokenize.NewAPI("")
token1 := tokenize.Token{Value: 1}
token2 := tokenize.Token{Value: 2}
token3 := tokenize.Token{Value: 3}
token4 := tokenize.Token{Value: 4}
// token1 := tokenize.Token{Value: 1}
// token2 := tokenize.Token{Value: 2}
// token3 := tokenize.Token{Value: 3}
// token4 := tokenize.Token{Value: 4}
tokenAPI.Output.SetTokens(token1)
tokens := tokenAPI.Output.Tokens()
AssertEqual(t, 1, len(tokens), "Tokens 1")
// tokenAPI.Output.SetTokens(token1)
// tokens := tokenAPI.Output.Tokens()
// AssertEqual(t, 1, len(tokens), "Tokens 1")
child := tokenAPI.Fork()
// child := tokenAPI.Fork()
tokens = tokenAPI.Output.Tokens()
AssertEqual(t, 0, len(tokens), "Tokens 2")
// tokens = tokenAPI.Output.Tokens()
// AssertEqual(t, 0, len(tokens), "Tokens 2")
tokenAPI.Output.AddToken(token2)
// tokenAPI.Output.AddToken(token2)
tokenAPI.Merge(child)
tokenAPI.Dispose(child)
// tokenAPI.Merge(child)
// tokenAPI.Dispose(child)
tokens = tokenAPI.Output.Tokens()
AssertEqual(t, 2, len(tokens), "Tokens 3")
// tokens = tokenAPI.Output.Tokens()
// AssertEqual(t, 2, len(tokens), "Tokens 3")
child = tokenAPI.Fork()
tokenAPI.Output.AddToken(token3)
tokenAPI.Output.Reset()
tokenAPI.Output.AddToken(token4)
// child = tokenAPI.Fork()
// tokenAPI.Output.AddToken(token3)
// tokenAPI.Output.Reset()
// tokenAPI.Output.AddToken(token4)
tokenAPI.Merge(child)
tokenAPI.Dispose(child)
// tokenAPI.Merge(child)
// tokenAPI.Dispose(child)
tokens = tokenAPI.Output.Tokens()
AssertEqual(t, 3, len(tokens), "Tokens 4")
AssertEqual(t, 1, tokenAPI.Output.TokenValue(0).(int), "Tokens 4, value 0")
AssertEqual(t, 2, tokenAPI.Output.TokenValue(1).(int), "Tokens 4, value 1")
AssertEqual(t, 4, tokenAPI.Output.TokenValue(2).(int), "Tokens 4, value 2")
}
// tokens = tokenAPI.Output.Tokens()
// AssertEqual(t, 3, len(tokens), "Tokens 4")
// AssertEqual(t, 1, tokenAPI.Output.TokenValue(0).(int), "Tokens 4, value 0")
// AssertEqual(t, 2, tokenAPI.Output.TokenValue(1).(int), "Tokens 4, value 1")
// AssertEqual(t, 4, tokenAPI.Output.TokenValue(2).(int), "Tokens 4, value 2")
// }

View File

@ -11,16 +11,6 @@ import (
// Result struct (possibly nil) and an error (possibly nil).
type Func func(input interface{}) (*Result, error)
// Result holds the runes and tokens as produced by the tokenizer.
type Result struct {
Tokens []Token
Runes []rune
}
func (result *Result) String() string {
return string(result.Runes)
}
// New instantiates a new tokenizer.
//
// The tokenizer is a tokenizing state machine, in which tokenize.Handler
@ -47,7 +37,7 @@ func New(tokenHandler Handler) Func {
return nil, err
}
result := &Result{
Runes: tokenAPI.Output.Runes(),
Bytes: tokenAPI.Output.Bytes(),
Tokens: tokenAPI.Output.Tokens(),
}
return result, nil

View File

@ -72,65 +72,65 @@ func TestInputCanAcceptRunesFromReader(t *testing.T) {
AssertEqual(t, "Tes", tokenAPI.Output.String(), "i.String()")
}
func TestCallingMergeOnTopLevelAPI_Panics(t *testing.T) {
AssertPanic(t, PanicT{
Function: func() {
tokenAPI := makeTokenizeAPI()
tokenAPI.Merge(0)
},
Regexp: true,
Expect: `tokenize\.API\.Merge\(\): Merge\(\) called at /.*_test.go:\d+ on the top-level API`})
}
// func TestCallingMergeOnTopLevelAPI_Panics(t *testing.T) {
// AssertPanic(t, PanicT{
// Function: func() {
// tokenAPI := makeTokenizeAPI()
// tokenAPI.Merge(0)
// },
// Regexp: true,
// Expect: `tokenize\.API\.Merge\(\): Merge\(\) called at /.*_test.go:\d+ on the top-level API`})
// }
func TestCallingMergeOnForkParentAPI_Panics(t *testing.T) {
AssertPanic(t, PanicT{
Function: func() {
tokenAPI := makeTokenizeAPI()
child := tokenAPI.Fork()
tokenAPI.Fork()
tokenAPI.Merge(child)
},
Regexp: true,
Expect: `tokenize\.API\.Merge\(\): Merge\(\) called at /.*_test.go:\d+ ` +
`on API stack level 1, but the current stack level is 2 \(forgot to Dispose\(\) a forked child\?\)`})
}
// func TestCallingMergeOnForkParentAPI_Panics(t *testing.T) {
// AssertPanic(t, PanicT{
// Function: func() {
// tokenAPI := makeTokenizeAPI()
// child := tokenAPI.Fork()
// tokenAPI.Fork()
// tokenAPI.Merge(child)
// },
// Regexp: true,
// Expect: `tokenize\.API\.Merge\(\): Merge\(\) called at /.*_test.go:\d+ ` +
// `on API stack level 1, but the current stack level is 2 \(forgot to Dispose\(\) a forked child\?\)`})
// }
func TestCallingDisposeOnTopLevelAPI_Panics(t *testing.T) {
AssertPanic(t, PanicT{
Function: func() {
tokenAPI := makeTokenizeAPI()
tokenAPI.Dispose(0)
},
Regexp: true,
Expect: `tokenize\.API\.Dispose\(\): Dispose\(\) called at /.*_test.go:\d+ on the top-level API`})
}
// func TestCallingDisposeOnTopLevelAPI_Panics(t *testing.T) {
// AssertPanic(t, PanicT{
// Function: func() {
// tokenAPI := makeTokenizeAPI()
// tokenAPI.Dispose(0)
// },
// Regexp: true,
// Expect: `tokenize\.API\.Dispose\(\): Dispose\(\) called at /.*_test.go:\d+ on the top-level API`})
// }
func TestCallingDisposeOnForkParentAPI_Panics(t *testing.T) {
AssertPanic(t, PanicT{
Function: func() {
tokenAPI := makeTokenizeAPI()
child := tokenAPI.Fork()
tokenAPI.Fork()
tokenAPI.Dispose(child)
},
Regexp: true,
Expect: `tokenize\.API\.Dispose\(\): Dispose\(\) called at /.*_test.go:\d+ ` +
`on API stack level 1, but the current stack level is 2 \(forgot to Dispose\(\) a forked child\?\)`})
}
// func TestCallingDisposeOnForkParentAPI_Panics(t *testing.T) {
// AssertPanic(t, PanicT{
// Function: func() {
// tokenAPI := makeTokenizeAPI()
// child := tokenAPI.Fork()
// tokenAPI.Fork()
// tokenAPI.Dispose(child)
// },
// Regexp: true,
// Expect: `tokenize\.API\.Dispose\(\): Dispose\(\) called at /.*_test.go:\d+ ` +
// `on API stack level 1, but the current stack level is 2 \(forgot to Dispose\(\) a forked child\?\)`})
// }
func TestCallingForkOnForkedParentAPI_Panics(t *testing.T) {
AssertPanic(t, PanicT{
Function: func() {
tokenAPI := makeTokenizeAPI()
tokenAPI.Fork()
g := tokenAPI.Fork()
tokenAPI.Fork()
tokenAPI.Merge(g)
},
Regexp: true,
Expect: `tokenize\.API\.Merge\(\): Merge\(\) called at /.*_test.go:\d+ ` +
`on API stack level 2, but the current stack level is 3 \(forgot to Dispose\(\) a forked child\?\)`})
}
// func TestCallingForkOnForkedParentAPI_Panics(t *testing.T) {
// AssertPanic(t, PanicT{
// Function: func() {
// tokenAPI := makeTokenizeAPI()
// tokenAPI.Fork()
// g := tokenAPI.Fork()
// tokenAPI.Fork()
// tokenAPI.Merge(g)
// },
// Regexp: true,
// Expect: `tokenize\.API\.Merge\(\): Merge\(\) called at /.*_test.go:\d+ ` +
// `on API stack level 2, but the current stack level is 3 \(forgot to Dispose\(\) a forked child\?\)`})
// }
func TestAccept_UpdatesCursor(t *testing.T) {
tokenAPI := tokenize.NewAPI(strings.NewReader("input\r\nwith\r\nnewlines"))
@ -162,25 +162,25 @@ func TestWhenCallingPeekruneAtEndOfFile_EOFIsReturned(t *testing.T) {
AssertEqual(t, true, err == io.EOF, "returned error from NextRune()")
}
func TestAfterReadingruneAtEndOfFile_EarlierRunesCanStillBeAccessed(t *testing.T) {
i := tokenize.NewAPI(strings.NewReader("X"))
child := i.Fork()
// func TestAfterReadingruneAtEndOfFile_EarlierRunesCanStillBeAccessed(t *testing.T) {
// i := tokenize.NewAPI(strings.NewReader("X"))
// child := i.Fork()
// Go to the EOF.
r, _, _ := i.Rune.Peek(0)
i.Rune.Accept(r)
r, _, err := i.Rune.Peek(0)
AssertEqual(t, true, r == utf8.RuneError, "returned rune from 2nd NextRune()")
AssertEqual(t, true, err == io.EOF, "returned error from 2nd NextRune()")
// // Go to the EOF.
// r, _, _ := i.Rune.Peek(0)
// i.Rune.Accept(r)
// r, _, err := i.Rune.Peek(0)
// AssertEqual(t, true, r == utf8.RuneError, "returned rune from 2nd NextRune()")
// AssertEqual(t, true, err == io.EOF, "returned error from 2nd NextRune()")
// Brings the read offset back to the start.
i.Dispose(child)
// // Brings the read offset back to the start.
// i.Dispose(child)
// So here we should see the same input data as before.
r, _, err = i.Rune.Peek(0)
AssertEqual(t, 'X', r, "returned rune from 2nd NextRune()")
AssertEqual(t, true, err == nil, "returned error from 2nd NextRune()")
}
// // So here we should see the same input data as before.
// r, _, err = i.Rune.Peek(0)
// AssertEqual(t, 'X', r, "returned rune from 2nd NextRune()")
// AssertEqual(t, true, err == nil, "returned error from 2nd NextRune()")
// }
func makeTokenizeAPI() *tokenize.API {
return tokenize.NewAPI("Testing")

View File

@ -4,108 +4,106 @@ import (
"testing"
)
func TestFork_CreatesForkOfInputAtSameCursorPosition(t *testing.T) {
func TestMakeSplitOutput_SplitsOutputAtActiveCursorPosition(t *testing.T) {
// Create input, accept the first rune.
tokenAPI := NewAPI("Testing")
r, _, _ := tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r) // T
AssertEqual(t, "T", tokenAPI.Output.String(), "accepted rune in input")
// Fork
child := tokenAPI.Fork()
AssertEqual(t, 1, tokenAPI.stackFrame.offset, "parent offset")
AssertEqual(t, 1, tokenAPI.stackFrame.offset, "child offset")
// Split
split := tokenAPI.SplitOutput()
AssertEqual(t, 1, tokenAPI.pointers.offset, "parent offset")
AssertEqual(t, 1, tokenAPI.pointers.offset, "child offset")
// Accept two runes via fork.
r, _, _ = tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r) // e
r, _, _ = tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r) // s
AssertEqual(t, "es", tokenAPI.Output.String(), "result runes in fork")
AssertEqual(t, 1, tokenAPI.stackFrames[tokenAPI.stackLevel-1].offset, "parent offset")
AssertEqual(t, 3, tokenAPI.stackFrame.offset, "child offset")
AssertEqual(t, "es", tokenAPI.Output.String(), "result runes in split output")
AssertEqual(t, 3, tokenAPI.pointers.offset, "offset in split output")
// Merge fork back into parent
tokenAPI.Merge(child)
tokenAPI.Dispose(child)
// Merge split output back into main output.
tokenAPI.MergeSplitOutput(split)
AssertEqual(t, "Tes", tokenAPI.Output.String(), "result runes in parent Input after Merge()")
AssertEqual(t, 3, tokenAPI.stackFrame.offset, "parent offset")
AssertEqual(t, 3, tokenAPI.pointers.offset, "parent offset")
}
func TestGivenForkedChildWhichAcceptedRune_AfterMerging_RuneEndsUpInParentResult(t *testing.T) {
tokenAPI := NewAPI("Testing")
r, _, _ := tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r) // T
// func TestGivenForkedChildWhichAcceptedRune_AfterMerging_RuneEndsUpInParentResult(t *testing.T) {
// tokenAPI := NewAPI("Testing")
// r, _, _ := tokenAPI.Rune.Peek(0)
// tokenAPI.Rune.Accept(r) // T
f1 := tokenAPI.Fork()
r, _, _ = tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r) // e
// f1 := tokenAPI.Fork()
// r, _, _ = tokenAPI.Rune.Peek(0)
// tokenAPI.Rune.Accept(r) // e
f2 := tokenAPI.Fork()
r, _, _ = tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r) // s
AssertEqual(t, "s", tokenAPI.Output.String(), "f2 String()")
AssertEqual(t, 3, tokenAPI.stackFrame.offset, "f2.offset A")
// f2 := tokenAPI.Fork()
// r, _, _ = tokenAPI.Rune.Peek(0)
// tokenAPI.Rune.Accept(r) // s
// AssertEqual(t, "s", tokenAPI.Output.String(), "f2 String()")
// AssertEqual(t, 3, tokenAPI.stackFrame.offset, "f2.offset A")
tokenAPI.Merge(f2)
tokenAPI.Dispose(f2)
AssertEqual(t, "es", tokenAPI.Output.String(), "f1 String()")
AssertEqual(t, 3, tokenAPI.stackFrame.offset, "f1.offset A")
// tokenAPI.Merge(f2)
// tokenAPI.Dispose(f2)
// AssertEqual(t, "es", tokenAPI.Output.String(), "f1 String()")
// AssertEqual(t, 3, tokenAPI.stackFrame.offset, "f1.offset A")
tokenAPI.Merge(f1)
tokenAPI.Dispose(f1)
AssertEqual(t, "Tes", tokenAPI.Output.String(), "top-level API String()")
AssertEqual(t, 3, tokenAPI.stackFrame.offset, "f1.offset A")
}
// tokenAPI.Merge(f1)
// tokenAPI.Dispose(f1)
// AssertEqual(t, "Tes", tokenAPI.Output.String(), "top-level API String()")
// AssertEqual(t, 3, tokenAPI.stackFrame.offset, "f1.offset A")
// }
func TestFlushInput(t *testing.T) {
tokenAPI := NewAPI("cool")
// func TestFlushInput(t *testing.T) {
// tokenAPI := NewAPI("cool")
// Flushing without any read data is okay. FlushInput() will return
// false in this case, and nothing else happens.
AssertTrue(t, tokenAPI.Input.Flush() == false, "flush input at start")
// // Flushing without any read data is okay. FlushInput() will return
// // false in this case, and nothing else happens.
// AssertTrue(t, tokenAPI.Input.Flush() == false, "flush input at start")
r, _, _ := tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r) // c
r, _, _ = tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r) // o
// r, _, _ := tokenAPI.Rune.Peek(0)
// tokenAPI.Rune.Accept(r) // c
// r, _, _ = tokenAPI.Rune.Peek(0)
// tokenAPI.Rune.Accept(r) // o
AssertTrue(t, tokenAPI.Input.Flush() == true, "flush input after reading some data")
AssertEqual(t, 0, tokenAPI.stackFrame.offset, "offset after flush input")
// AssertTrue(t, tokenAPI.Input.Flush() == true, "flush input after reading some data")
// AssertEqual(t, 0, tokenAPI.stackFrame.offset, "offset after flush input")
AssertTrue(t, tokenAPI.Input.Flush() == false, "flush input after flush input")
// AssertTrue(t, tokenAPI.Input.Flush() == false, "flush input after flush input")
// Read offset is now zero, but reading should continue after "co".
// The output so far isn't modified, so the following accept calls
// will add their runes to the already accepted string "co".
r, _, _ = tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r) // o
r, _, _ = tokenAPI.Rune.Peek(0)
tokenAPI.Rune.Accept(r) // o
// // Read offset is now zero, but reading should continue after "co".
// // The output so far isn't modified, so the following accept calls
// // will add their runes to the already accepted string "co".
// r, _, _ = tokenAPI.Rune.Peek(0)
// tokenAPI.Rune.Accept(r) // o
// r, _, _ = tokenAPI.Rune.Peek(0)
// tokenAPI.Rune.Accept(r) // o
AssertEqual(t, "cool", tokenAPI.Output.String(), "end result")
}
// AssertEqual(t, "cool", tokenAPI.Output.String(), "end result")
// }
func TestInputFlusherWrapper(t *testing.T) {
runeA := A.Rune('a')
flushB := C.FlushInput(A.Rune('b'))
tokenAPI := NewAPI("abaab")
runeA(tokenAPI)
AssertEqual(t, 1, tokenAPI.stackFrame.offset, "offset after 1 read")
AssertEqual(t, "a", tokenAPI.Output.String(), "runes after 1 read")
flushB(tokenAPI)
AssertEqual(t, 0, tokenAPI.stackFrame.offset, "offset after 2 reads + input flush")
AssertEqual(t, "ab", tokenAPI.Output.String(), "runes after 2 reads")
runeA(tokenAPI)
AssertEqual(t, 1, tokenAPI.stackFrame.offset, "offset after 3 reads")
AssertEqual(t, "aba", tokenAPI.Output.String(), "runes after 3 reads")
runeA(tokenAPI)
AssertEqual(t, 2, tokenAPI.stackFrame.offset, "offset after 4 reads")
AssertEqual(t, "abaa", tokenAPI.Output.String(), "runes after 4 reads")
flushB(tokenAPI)
AssertEqual(t, 0, tokenAPI.stackFrame.offset, "offset after 5 reads + input flush")
AssertEqual(t, "abaab", tokenAPI.Output.String(), "runes after 5 reads")
}
// func TestInputFlusherWrapper(t *testing.T) {
// runeA := A.Rune('a')
// flushB := C.FlushInput(A.Rune('b'))
// tokenAPI := NewAPI("abaab")
// runeA(tokenAPI)
// AssertEqual(t, 1, tokenAPI.stackFrame.offset, "offset after 1 read")
// AssertEqual(t, "a", tokenAPI.Output.String(), "runes after 1 read")
// flushB(tokenAPI)
// AssertEqual(t, 0, tokenAPI.stackFrame.offset, "offset after 2 reads + input flush")
// AssertEqual(t, "ab", tokenAPI.Output.String(), "runes after 2 reads")
// runeA(tokenAPI)
// AssertEqual(t, 1, tokenAPI.stackFrame.offset, "offset after 3 reads")
// AssertEqual(t, "aba", tokenAPI.Output.String(), "runes after 3 reads")
// runeA(tokenAPI)
// AssertEqual(t, 2, tokenAPI.stackFrame.offset, "offset after 4 reads")
// AssertEqual(t, "abaa", tokenAPI.Output.String(), "runes after 4 reads")
// flushB(tokenAPI)
// AssertEqual(t, 0, tokenAPI.stackFrame.offset, "offset after 5 reads + input flush")
// AssertEqual(t, "abaab", tokenAPI.Output.String(), "runes after 5 reads")
// }
func AssertEqual(t *testing.T, expected interface{}, actual interface{}, forWhat string) {
if expected != actual {