go-parsekit/tokenize/tokenapi_example_test.go

package tokenize_test

import (
	"fmt"

	"git.makaay.nl/mauricem/go-parsekit/tokenize"
)

func ExampleTokenAPI_Fork() {
	// This custom TokenHandler checks for input 'a', 'b' or 'c'.
	abcHandler := func(t *tokenize.TokenAPI) bool {
		a := tokenize.A
		for _, r := range []rune{'a', 'b', 'c'} {
			child := t.Fork() // fork, so we won't change parent t
			if a.Rune(r)(child) {
				child.Merge() // accept results into parent t
				return true   // and report a successful match
			}
		}
		// If we get here, then no match was found. Return false to communicate
		// this to the caller.
		return false
	}

	// Note: a custom TokenHandler is normally not what you need.
	// The parser/combinator tooling does the same job in a much simpler
	// way and takes care of forking at the appropriate places.
	// The handler above can be replaced with:
	simpler := tokenize.A.RuneRange('a', 'c')
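
	// Both handlers match a single 'a', 'b' or 'c' at the start of the
	// input, so the calls below use them interchangeably.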
	result, err := tokenize.NewTokenizer(abcHandler).Execute("another test")
	fmt.Println(result, err)

	result, err = tokenize.NewTokenizer(simpler).Execute("curious")
	fmt.Println(result, err)

	result, err = tokenize.NewTokenizer(abcHandler).Execute("bang on!")
	fmt.Println(result, err)

	result, err = tokenize.NewTokenizer(abcHandler).Execute("not a match")
	fmt.Println(result, err)

	// Output:
	// a <nil>
	// c <nil>
	// b <nil>
	// <nil> mismatch at start of file
}

func ExampleTokenAPI_Merge() {
	tokenHandler := func(t *tokenize.TokenAPI) bool {
		child1 := t.Fork()
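		// child1 reads from the same input as t, but what it reads is not
		// applied to the parent until Merge is called on it.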
		child1.NextRune() // reads 'H'
		child1.Accept()
		child1.NextRune() // reads 'i'
		child1.Accept()

		child2 := child1.Fork()
		child2.NextRune() // reads ' '
		child2.Accept()
		child2.NextRune() // reads 'd'
		child2.Accept()
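
		// child2 is never merged, so the runes it accepted (' ' and 'd')
		// do not end up in the parent's output.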
		child1.Merge() // We merge child1, which has read 'H' and 'i' only.
		return true
	}

	result, _ := tokenize.NewTokenizer(tokenHandler).Execute("Hi mister X!")
	fmt.Println(result)

	// Output:
	// Hi
}