package tokenize_test

import (
	"fmt"
	"testing"

	"git.makaay.nl/mauricem/go-parsekit/tokenize"
)

func ExampleNewAPI() {
	tokenize.NewAPI("The input that the API will handle")
	// Output:
}

// func ExampleAPI_NextRune() {
// 	api := tokenize.NewAPI("The input that the API will handle")
// 	r, err := api.NextRune()
//
// 	fmt.Printf("Rune read from input; %c\n", r)
// 	fmt.Printf("The error: %v\n", err)
// 	fmt.Printf("API results: %q\n", api.String())
//
// 	// Output:
// 	// Rune read from input; T
// 	// The error: <nil>
// 	// API results: ""
// }

func ExampleAPI_PeekRune() {
	api := tokenize.NewAPI("The input that the API will handle")

	r1, _, err := api.PeekRune(19) // 'A'
	r2, _, err := api.PeekRune(20) // 'P'
	r3, _, err := api.PeekRune(21) // 'I'
	_, _, err = api.PeekRune(100)  // EOF

	fmt.Printf("%c%c%c %s\n", r1, r2, r3, err)

	// Output:
	// API EOF
}

func ExampleAPI_AcceptRune() {
	api := tokenize.NewAPI("The input that the API will handle")

	// Reads 'T' and adds it to the API results.
	r, _, _ := api.PeekRune(0)
	api.AcceptRune(r)

	// Reads 'h' and adds it to the API results.
	r, _, _ = api.PeekRune(0)
	api.AcceptRune(r)

	// Reads 'e', but does not add it to the API results.
	r, _, _ = api.PeekRune(0)

	fmt.Printf("API results: %q\n", api.String())

	// Output:
	// API results: "Th"
}

func ExampleAPI_modifyingResults() {
	api := tokenize.NewAPI("")

	api.AddString("Some runes")
	api.AddRunes(' ', 'a', 'd', 'd', 'e', 'd')
	api.AddRunes(' ', 'i', 'n', ' ')
	api.AddString("various ways")
	fmt.Printf("API result first 10 runes: %q\n", api.Runes()[0:10])
	fmt.Printf("API result runes as string: %q\n", api.String())

	api.SetString("new ")
	api.AddString("set ")
	api.AddString("of ")
	api.AddRunes('r', 'u', 'n', 'e', 's')
	fmt.Printf("API result runes as string: %q\n", api.String())
	fmt.Printf("API result runes: %q\n", api.Runes())
	fmt.Printf("API third rune: %q\n", api.Rune(2))

	api.AddTokens(tokenize.Token{Type: 42, Value: "towel"})
	api.AddTokens(tokenize.Token{Type: 73, Value: "Zaphod"})
	fmt.Printf("API result tokens: %v\n", api.Tokens())
	fmt.Printf("API second result token: %v\n", api.Token(1))

	// Output:
	// API result first 10 runes: ['S' 'o' 'm' 'e' ' ' 'r' 'u' 'n' 'e' 's']
	// API result runes as string: "Some runes added in various ways"
	// API result runes as string: "new set of runes"
	// API result runes: ['n' 'e' 'w' ' ' 's' 'e' 't' ' ' 'o' 'f' ' ' 'r' 'u' 'n' 'e' 's']
	// API third rune: 'w'
	// API result tokens: [42("towel") 73("Zaphod")]
	// API second result token: 73("Zaphod")
}

// TODO FIXME
// func ExampleAPI_Reset() {
// 	api := tokenize.NewAPI("Very important input!")
//
// 	api.NextRune() // read 'V'
// 	api.Accept()
// 	api.NextRune() // read 'e'
// 	api.Accept()
// 	fmt.Printf("API results: %q at %s\n", api.String(), api.Cursor())
//
// 	// Reset clears the results.
// 	api.Reset()
// 	fmt.Printf("API results: %q at %s\n", api.String(), api.Cursor())
//
// 	// So repeating the same read operations reads the same data again.
// 	api.NextRune() // read 'V'
// 	api.Accept()
// 	api.NextRune() // read 'e'
// 	api.Accept()
// 	fmt.Printf("API results: %q at %s\n", api.String(), api.Cursor())
//
// 	// Output:
// 	// API results: "Ve" at line 1, column 3
// 	// API results: "" at start of file
// 	// API results: "Ve" at line 1, column 3
// }
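// ExampleAPI_acceptLoop is an added sketch, not one of the upstream examples.
// It combines PeekRune and AcceptRune (both demonstrated above) into a small
// read loop that accepts runes until the first space, relying only on the
// behavior that ExampleAPI_AcceptRune already shows.
func ExampleAPI_acceptLoop() {
	api := tokenize.NewAPI("The input that the API will handle")

	for {
		r, _, err := api.PeekRune(0)
		if err != nil || r == ' ' {
			break // stop at end of input or at the first space
		}
		api.AcceptRune(r) // add the rune to the API results and move on
	}

	fmt.Printf("API results: %q\n", api.String())

	// Output:
	// API results: "The"
}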
func ExampleAPI_Fork() {
	// This custom Handler checks for input 'a', 'b' or 'c'.
	abcHandler := func(t *tokenize.API) bool {
		a := tokenize.A
		for _, r := range []rune{'a', 'b', 'c'} {
			child := t.Fork() // fork, so we won't change the parent t
			if a.Rune(r)(t) {
				t.Merge(child)   // accept the child's results into its parent
				t.Dispose(child) // return to the parent level
				return true      // and report a successful match
			}
			t.Dispose(child) // return to the parent level
		}
		// If we get here, then no match was found. Return false to communicate
		// this to the caller.
		return false
	}

	// Note: a custom Handler is normally not what you need.
	// You can make use of the parser/combinator tooling to make the
	// implementation a lot simpler and to take care of forking at
	// the appropriate places. The handler from above can be replaced with:
	simpler := tokenize.A.RuneRange('a', 'c')

	result, err := tokenize.New(abcHandler)("another test")
	fmt.Println(result, err)

	result, err = tokenize.New(simpler)("curious")
	fmt.Println(result, err)

	result, err = tokenize.New(abcHandler)("bang on!")
	fmt.Println(result, err)

	result, err = tokenize.New(abcHandler)("not a match")
	fmt.Println(result, err)

	// Output:
	// a <nil>
	// c <nil>
	// b <nil>
	// <nil> mismatch at start of file
}

// TODO FIXME
// func ExampleAPI_Merge() {
// 	tokenHandler := func(t *tokenize.API) bool {
// 		child1 := t.Fork()
//
// 		t.NextRune() // reads 'H'
// 		t.Accept()
// 		t.NextRune() // reads 'i'
// 		t.Accept()
//
// 		child2 := t.Fork()
//
// 		t.NextRune() // reads ' '
// 		t.Accept()
// 		t.NextRune() // reads 'm'
// 		t.Accept()
// 		t.Dispose(child2)
//
// 		t.Merge(child1)   // We merge child1, which has read 'H' and 'i' only.
// 		t.Dispose(child1) // and clean up child1 to return to the parent
// 		return true
// 	}
//
// 	result, _ := tokenize.New(tokenHandler)("Hi mister X!")
// 	fmt.Println(result.String())
//
// 	// Output:
// 	// Hi
// }
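// ExampleAPI_Fork_sequence is an added sketch, not one of the upstream
// examples. It uses the same Fork/Merge/Dispose pattern as ExampleAPI_Fork,
// here as a transaction around a two-rune sequence: only when both runes
// match is the child merged into the parent; otherwise the fork is disposed
// without merging, leaving the parent API untouched.
func ExampleAPI_Fork_sequence() {
	a := tokenize.A
	matchAB := func(t *tokenize.API) bool {
		child := t.Fork() // work in isolation from the parent
		if a.Rune('a')(t) && a.Rune('b')(t) {
			t.Merge(child)   // keep what the fork has read
			t.Dispose(child) // and drop back to the parent level
			return true
		}
		t.Dispose(child) // discard everything the fork has read
		return false
	}

	api := tokenize.NewAPI("abc")
	fmt.Printf("match: %t, API results: %q\n", matchAB(api), api.String())

	api = tokenize.NewAPI("acdc")
	fmt.Printf("match: %t, API results: %q\n", matchAB(api), api.String())

	// Output:
	// match: true, API results: "ab"
	// match: false, API results: ""
}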
// TODO FIXME
// func TestMultipleLevelsOfForksAndMerges(t *testing.T) {
// 	api := tokenize.NewAPI("abcdefghijklmnopqrstuvwxyz")
//
// 	// Fork a few levels.
// 	child1 := api.Fork()
// 	child2 := api.Fork()
// 	child3 := api.Fork()
// 	child4 := api.Fork()
//
// 	// Read a rune 'a' from child4.
// 	r, _ := api.NextRune()
// 	AssertEqual(t, 'a', r, "child4 rune 1")
// 	api.Accept()
// 	AssertEqual(t, "a", api.String(), "child4 runes after rune 1")
//
// 	// Read another rune 'b' from child4.
// 	r, _ = api.NextRune()
// 	AssertEqual(t, 'b', r, "child4 rune 2")
// 	api.Accept()
// 	AssertEqual(t, "ab", api.String(), "child4 runes after rune 2")
//
// 	// Merge "ab" from child4 to child3.
// 	api.Merge(child4)
// 	AssertEqual(t, "", api.String(), "child4 runes after first merge")
//
// 	// Read some more from child4.
// 	r, _ = api.NextRune()
// 	AssertEqual(t, 'c', r, "child4 rune 3")
// 	api.Accept()
// 	AssertEqual(t, "c", api.String(), "child4 runes after rune 3")
// 	AssertEqual(t, "line 1, column 4", api.Cursor(), "cursor child4 rune 3")
//
// 	// Merge "c" from child4 to child3.
// 	api.Merge(child4)
//
// 	// And dispose of child4, making child3 the active stack level.
// 	api.Dispose(child4)
//
// 	// Child3 should now have the combined results "abc" from child4's work.
// 	AssertEqual(t, "abc", api.String(), "child3 after merge of child4")
// 	AssertEqual(t, "line 1, column 4", api.Cursor(), "cursor child3 rune 3, after merge of child4")
//
// 	// Now read some data from child3.
// 	r, _ = api.NextRune()
// 	AssertEqual(t, 'd', r, "child3 rune 4")
// 	api.Accept()
//
// 	r, _ = api.NextRune()
// 	AssertEqual(t, 'e', r, "child3 rune 5")
// 	api.Accept()
//
// 	r, _ = api.NextRune()
// 	AssertEqual(t, 'f', r, "child3 rune 6")
// 	api.Accept()
//
// 	AssertEqual(t, "abcdef", api.String(), "child3 total result after rune 6")
//
// 	// Temporarily fork some new levels from here, but don't use their outcome.
// 	child3sub1 := api.Fork()
// 	api.NextRune()
// 	api.Accept()
// 	api.NextRune()
// 	api.Accept()
// 	child3sub2 := api.Fork()
// 	api.NextRune()
// 	api.Accept()
// 	api.Merge(child3sub2)   // do merge sub2 down to sub1
// 	api.Dispose(child3sub2) // and dispose of sub2
// 	api.Dispose(child3sub1) // but dispose of sub1 without merging
//
// 	// Instead, merge the results from before this forking detour from child3
// 	// to child2 and dispose of child3.
// 	api.Merge(child3)
// 	api.Dispose(child3)
//
// 	AssertEqual(t, "abcdef", api.String(), "child2 total result after merge of child3")
// 	AssertEqual(t, "line 1, column 7", api.Cursor(), "cursor child2 after merge child3")
//
// 	// Merge child2 to child1 and dispose of it.
// 	api.Merge(child2)
// 	api.Dispose(child2)
//
// 	// Merge child1 a few times to the top level api.
// 	api.Merge(child1)
// 	api.Merge(child1)
// 	api.Merge(child1)
// 	api.Merge(child1)
//
// 	// And dispose of it.
// 	api.Dispose(child1)
//
// 	// Read some data from the top level api.
// 	r, _ = api.NextRune()
// 	api.Accept()
//
// 	AssertEqual(t, "abcdefg", api.String(), "api string end result")
// 	AssertEqual(t, "line 1, column 8", api.Cursor(), "api cursor end result")
// }

// TODO FIXME
// func TestClearRunes(t *testing.T) {
// 	api := tokenize.NewAPI("Laphroaig")
// 	api.NextRune()   // Read 'L'
// 	api.Accept()     // Add to runes
// 	api.NextRune()   // Read 'a'
// 	api.Accept()     // Add to runes
// 	api.ClearRunes() // Clear the runes, giving us a fresh start.
// 	api.NextRune()   // Read 'p'
// 	api.Accept()     // Add to runes
// 	api.NextRune()   // Read 'h'
// 	api.Accept()     // Add to runes
//
// 	AssertEqual(t, "ph", api.String(), "api string end result")
// }

func TestMergeScenariosForTokens(t *testing.T) {
	api := tokenize.NewAPI("")

	token1 := tokenize.Token{Value: 1}
	token2 := tokenize.Token{Value: 2}
	token3 := tokenize.Token{Value: 3}
	token4 := tokenize.Token{Value: 4}

	api.SetTokens(token1)
	tokens := api.Tokens()
	AssertEqual(t, 1, len(tokens), "Tokens 1")

	child := api.Fork()
	tokens = api.Tokens()
	AssertEqual(t, 0, len(tokens), "Tokens 2")

	api.AddTokens(token2)
	api.Merge(child)
	api.Dispose(child)

	tokens = api.Tokens()
	AssertEqual(t, 2, len(tokens), "Tokens 3")

	child = api.Fork()
	api.AddTokens(token3)
	api.Reset()
	api.AddTokens(token4)
	api.Merge(child)
	api.Dispose(child)

	tokens = api.Tokens()
	AssertEqual(t, 3, len(tokens), "Tokens 4")
	AssertEqual(t, 1, api.TokenValue(0).(int), "Tokens 4, value 0")
	AssertEqual(t, 2, api.TokenValue(1).(int), "Tokens 4, value 1")
	AssertEqual(t, 4, api.TokenValue(2).(int), "Tokens 4, value 2")
}
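// TestMergeScenariosForRunes is an added sketch that mirrors
// TestMergeScenariosForTokens above for the rune results. It assumes that
// merging a forked child appends the child's runes to the parent's runes,
// in the same way the token scenarios above demonstrate for tokens.
func TestMergeScenariosForRunes(t *testing.T) {
	api := tokenize.NewAPI("")

	api.SetString("a")
	AssertEqual(t, "a", api.String(), "Runes 1")

	child := api.Fork()
	AssertEqual(t, "", api.String(), "Runes 2")

	api.AddString("b")
	api.Merge(child)
	api.Dispose(child)

	AssertEqual(t, "ab", api.String(), "Runes 3")
}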