// go-parsekit/tokenize/api_test.go — tests and examples for the tokenize API.
package tokenize_test
import (
"fmt"
"testing"
"git.makaay.nl/mauricem/go-parsekit/tokenize"
)
func ExampleNewAPI() {
	// NewAPI only constructs an API for the provided input;
	// construction by itself produces no output.
	tokenize.NewAPI("The input that the API will handle")
	// Output:
}
func ExampleAPI_NextRune() {
	api := tokenize.NewAPI("The input that the API will handle")

	// NextRune only reads a rune from the input; without a following
	// Accept() call, the rune is not added to the API results.
	readRune, readErr := api.NextRune()

	fmt.Printf("Rune read from input; %c\n", readRune)
	fmt.Printf("The error: %v\n", readErr)
	fmt.Printf("API results: %q\n", api.String())

	// Output:
	// Rune read from input; T
	// The error: <nil>
	// API results: ""
}
func ExampleAPI_Accept() {
	api := tokenize.NewAPI("The input that the API will handle")

	// Each NextRune()/Accept() pair moves one rune into the API results.
	api.NextRune() // read 'T' from the input
	api.Accept()   // append 'T' to the results
	api.NextRune() // read 'h' from the input
	api.Accept()   // append 'h' to the results

	// A read without a matching Accept() leaves the results untouched.
	api.NextRune() // read 'e', which is never accepted

	fmt.Printf("API results: %q\n", api.String())

	// Output:
	// API results: "Th"
}
func ExampleAPI_modifyingResults() {
	api := tokenize.NewAPI("")

	// Results can be built up directly from strings and runes,
	// without reading anything from the input.
	api.AddString("Some runes")
	api.AddRunes(' ', 'a', 'd', 'd', 'e', 'd')
	api.AddRunes(' ', 'i', 'n', ' ')
	api.AddString("various ways")
	fmt.Printf("API result first 10 runes: %q\n", api.Runes()[0:10])
	fmt.Printf("API result runes as string: %q\n", api.String())

	// SetString() replaces all results collected so far.
	api.SetString("new ")
	api.AddString("set ")
	api.AddString("of ")
	api.AddRunes('r', 'u', 'n', 'e', 's')
	fmt.Printf("API result runes as string: %q\n", api.String())
	fmt.Printf("API result runes: %q\n", api.Runes())
	fmt.Printf("API third rune: %q\n", api.Rune(2))

	// Tokens can be appended to the results as well.
	api.AddTokens(tokenize.Token{
		Type:  42,
		Value: "towel"})
	api.AddTokens(tokenize.Token{
		Type:  73,
		Value: "Zaphod"})
	fmt.Printf("API result tokens: %v\n", api.Tokens())
	fmt.Printf("API second result token: %v\n", api.Token(1))

	// Output:
	// API result first 10 runes: ['S' 'o' 'm' 'e' ' ' 'r' 'u' 'n' 'e' 's']
	// API result runes as string: "Some runes added in various ways"
	// API result runes as string: "new set of runes"
	// API result runes: ['n' 'e' 'w' ' ' 's' 'e' 't' ' ' 'o' 'f' ' ' 'r' 'u' 'n' 'e' 's']
	// API third rune: 'w'
	// API result tokens: [42("towel") 73("Zaphod")]
	// API second result token: 73("Zaphod")
}
func ExampleAPI_Reset() {
	api := tokenize.NewAPI("Very important input!")

	// Accept the first two runes ("Ve") into the results.
	for i := 0; i < 2; i++ {
		api.NextRune()
		api.Accept()
	}
	fmt.Printf("API results: %q at %s\n", api.String(), api.Cursor())

	// Reset clears the results, but keeps the cursor position.
	api.Reset()
	fmt.Printf("API results: %q at %s\n", api.String(), api.Cursor())

	// Accept the next two runes ("ry") into the now-empty results.
	for i := 0; i < 2; i++ {
		api.NextRune()
		api.Accept()
	}
	fmt.Printf("API results: %q at %s\n", api.String(), api.Cursor())

	// Output:
	// API results: "Ve" at line 1, column 3
	// API results: "" at line 1, column 3
	// API results: "ry" at line 1, column 5
}
func ExampleAPI_Fork() {
	// This custom Handler checks for input 'a', 'b' or 'c'.
	matchABC := func(t *tokenize.API) bool {
		builders := tokenize.A
		for _, letter := range []rune{'a', 'b', 'c'} {
			snapshot := t.Fork() // fork, so we won't change parent t
			if builders.Rune(letter)(t) {
				t.Merge(snapshot)   // accept results into parent of child
				t.Dispose(snapshot) // return to the parent level
				return true         // and report a successful match
			}
			t.Dispose(snapshot) // return to the parent level
		}
		// No rune matched. Return false to communicate this to the caller.
		return false
	}

	// Note: a custom Handler is normally not what you need.
	// You can make use of the parser/combinator tooling to make the
	// implementation a lot simpler and to take care of forking at
	// the appropriate places. The handler from above can be replaced with:
	simpler := tokenize.A.RuneRange('a', 'c')

	result, err := tokenize.New(matchABC)("another test")
	fmt.Println(result, err)
	result, err = tokenize.New(simpler)("curious")
	fmt.Println(result, err)
	result, err = tokenize.New(matchABC)("bang on!")
	fmt.Println(result, err)
	result, err = tokenize.New(matchABC)("not a match")
	fmt.Println(result, err)

	// Output:
	// a <nil>
	// c <nil>
	// b <nil>
	// <nil> mismatch at start of file
}
func ExampleAPI_Merge() {
	handler := func(t *tokenize.API) bool {
		outer := t.Fork()
		t.NextRune() // reads 'H'
		t.Accept()
		t.NextRune() // reads 'i'
		t.Accept()

		inner := t.Fork()
		t.NextRune() // reads ' '
		t.Accept()
		t.NextRune() // reads 'm'
		t.Accept()
		t.Dispose(inner) // drop ' ' and 'm' without merging them

		t.Merge(outer)   // We merge outer, which has read 'H' and 'i' only.
		t.Dispose(outer) // and clean up outer to return to the parent
		return true
	}

	result, _ := tokenize.New(handler)("Hi mister X!")
	fmt.Println(result.String())

	// Output:
	// Hi
}
// TestMultipleLevelsOfForksAndMerges verifies that results and cursor
// positions propagate correctly through a stack of nested forks, merges
// and disposals.
func TestMultipleLevelsOfForksAndMerges(t *testing.T) {
	api := tokenize.NewAPI("abcdefghijklmnopqrstuvwxyz")

	// Fork a few levels.
	child1 := api.Fork()
	child2 := api.Fork()
	child3 := api.Fork()
	child4 := api.Fork()

	// Read a rune 'a' from child4.
	r, _ := api.NextRune()
	AssertEqual(t, 'a', r, "child4 rune 1")
	api.Accept()
	AssertEqual(t, "a", api.String(), "child4 runes after rune 1")

	// Read another rune 'b' from child4.
	r, _ = api.NextRune()
	AssertEqual(t, 'b', r, "child4 rune 2")
	api.Accept()
	AssertEqual(t, "ab", api.String(), "child4 runes after rune 2")

	// Merge "ab" from child4 to child3.
	api.Merge(child4)
	AssertEqual(t, "", api.String(), "child4 runes after first merge")

	// Read some more from child4.
	r, _ = api.NextRune()
	AssertEqual(t, 'c', r, "child4 rune 3")
	api.Accept()
	// Fixed copy-paste error: this label said "after rune 1".
	AssertEqual(t, "c", api.String(), "child4 runes after rune 3")
	AssertEqual(t, "line 1, column 4", api.Cursor().String(), "cursor child4 rune 3")

	// Merge "c" from child4 to child3.
	api.Merge(child4)

	// And dispose of child4, making child3 the active stack level.
	api.Dispose(child4)

	// Child3 should now have the combined results "abc" from child4's work.
	AssertEqual(t, "abc", api.String(), "child3 after merge of child4")
	AssertEqual(t, "line 1, column 4", api.Cursor().String(), "cursor child3 rune 3, after merge of child4")

	// Now read some data from child3.
	// Fixed copy-paste errors: these labels all said "child3 rune 5".
	r, _ = api.NextRune()
	AssertEqual(t, 'd', r, "child3 rune 4")
	api.Accept()

	r, _ = api.NextRune()
	AssertEqual(t, 'e', r, "child3 rune 5")
	api.Accept()

	r, _ = api.NextRune()
	AssertEqual(t, 'f', r, "child3 rune 6")
	api.Accept()

	AssertEqual(t, "abcdef", api.String(), "child3 total result after rune 6")

	// Temporarily make some new forks from here, but don't use their outcome.
	child3sub1 := api.Fork()
	api.NextRune()
	api.Accept()
	api.NextRune()
	api.Accept()
	child3sub2 := api.Fork()
	api.NextRune()
	api.Accept()
	api.Merge(child3sub2)   // do merge sub2 down to sub1
	api.Dispose(child3sub2) // and dispose of sub2
	api.Dispose(child3sub1) // but dispose of sub1 without merging

	// Instead merge the results from before this forking detour from child3
	// to child2 and dispose of it.
	api.Merge(child3)
	api.Dispose(child3)

	AssertEqual(t, "abcdef", api.String(), "child2 total result after merge of child3")
	AssertEqual(t, "line 1, column 7", api.Cursor().String(), "cursor child2 after merge child3")

	// Merge child2 to child1 and dispose of it.
	api.Merge(child2)
	api.Dispose(child2)

	// Merge child1 a few times to the top level api.
	// Merging the same level multiple times must be harmless.
	api.Merge(child1)
	api.Merge(child1)
	api.Merge(child1)
	api.Merge(child1)

	// And dispose of it.
	api.Dispose(child1)

	// Read some data from the top level api.
	r, _ = api.NextRune()
	api.Accept()

	AssertEqual(t, "abcdefg", api.String(), "api string end result")
	AssertEqual(t, "line 1, column 8", api.Cursor().String(), "api cursor end result")
}
// TestClearRunes verifies that ClearRunes() drops previously accepted
// runes, so only runes accepted afterwards end up in the results.
func TestClearRunes(t *testing.T) {
	api := tokenize.NewAPI("Laphroaig")

	// Accept "La" into the results.
	api.NextRune()
	api.Accept()
	api.NextRune()
	api.Accept()

	// Wipe the results collected so far, giving us a fresh start.
	api.ClearRunes()

	// Accept "ph" into the now-empty results.
	api.NextRune()
	api.Accept()
	api.NextRune()
	api.Accept()

	AssertEqual(t, "ph", api.String(), "api string end result")
}
// TestMergeScenariosForTokens verifies token propagation for both merge
// strategies: extending the parent token slice in place and copying
// tokens over a gap left by Reset().
func TestMergeScenariosForTokens(t *testing.T) {
	api := tokenize.NewAPI("")

	token1 := tokenize.Token{Value: 1}
	token2 := tokenize.Token{Value: 2}
	token3 := tokenize.Token{Value: 3}
	token4 := tokenize.Token{Value: 4}

	api.SetTokens(token1)
	got := api.Tokens()
	AssertEqual(t, 1, len(got), "Tokens 1")

	fork := api.Fork()
	got = api.Tokens()
	AssertEqual(t, 0, len(got), "Tokens 2")

	api.AddTokens(token2)

	// Here we can merge by expanding the token slice on the parent,
	// because the end of the parent slice and the start of the child
	// slice align.
	api.Merge(fork)
	api.Dispose(fork)
	got = api.Tokens()
	AssertEqual(t, 2, len(got), "Tokens 3")

	fork = api.Fork()
	api.AddTokens(token3)
	api.Reset()
	api.AddTokens(token4)

	// Here the merge means that token4 will be copied to the end of
	// the token slice of the parent, since there's a gap at the place
	// where token3 used to be.
	api.Merge(fork)
	api.Dispose(fork)
	got = api.Tokens()
	AssertEqual(t, 3, len(got), "Tokens 4")
	AssertEqual(t, 1, api.TokenValue(0).(int), "Tokens 4, value 0")
	AssertEqual(t, 2, api.TokenValue(1).(int), "Tokens 4, value 1")
	AssertEqual(t, 4, api.TokenValue(2).(int), "Tokens 4, value 2")
}