package tokenize_test

import (
	"fmt"
	"testing"

	tokenize "git.makaay.nl/mauricem/go-parsekit/tokenize"
)

// TODO For error handling, it would be really cool if, for example, the
// 10.0.300.1/24 case would return an actual error stating that
// 300 is not a valid octet for an IPv4 address.
// The biggest thing to take care of here is that errors should not stop
// a Parser flow (since we might be trying to match different cases in
// sequence), but a Parser flow should optionally be able to make use
// of the actual error.
// The same goes for a Tokenizer, since those can also make use of
// optional matching, using tokenize.C.Any(...) for example. If matching
// for Any(IPv4, Digits), the example case should simply end up with 10
// after the IPv4 mismatch. A sketch of that desired behavior follows below.
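//
// For illustration, a minimal sketch of that optional-matching case
// (assuming a.Digits is available as a handler; the error reporting is
// the part that is still missing):
//
//	var c, a = tokenize.C, tokenize.A
//	ipOrDigits := tokenize.New(c.Any(a.IPv4, a.Digits))
//	result, _ := ipOrDigits("10.0.300.1/24")
//	// Desired: a.IPv4 mismatches on the 300 octet, Any falls back to
//	// a.Digits, and result covers just "10", while the octet error
//	// stays available to the caller instead of being swallowed.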
func ExampleNew() {
	// Build the tokenizer for ip/mask.
	var c, a, t = tokenize.C, tokenize.A, tokenize.T
	ip := t.Str("ip", a.IPv4)
	mask := t.Int8("mask", a.IPv4CIDRMask)
	cidr := c.Seq(ip, a.Slash, mask)
	tokenizer := tokenize.New(cidr)

	for _, input := range []string{
		"000.000.000.000/000",
		"192.168.0.1/24",
		"255.255.255.255/32",
		"10.0.300.1/24",
		"not an IPv4 CIDR",
	} {
		// The tokenizer returns a Result and an error, which is nil on success.
		result, err := tokenizer(input)

		if err == nil {
			fmt.Printf("Result: %s\n", result.Tokens)
		} else {
			fmt.Printf("Error: %s\n", err)
		}
	}
	// Output:
	// Result: [ip("0.0.0.0") mask((int8)0)]
	// Result: [ip("192.168.0.1") mask((int8)24)]
	// Result: [ip("255.255.255.255") mask((int8)32)]
	// Error: mismatch at start of file
	// Error: mismatch at start of file
}

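// Note how the first case above demonstrates normalization: the
// zero-padded input "000.000.000.000/000" comes out of the tokenizer as
// ip("0.0.0.0") with an int8 mask value of 0.
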
// TODO FIXME
// func TestCallingNextRune_ReturnsNextRune(t *testing.T) {
// 	api := makeTokenizeAPI()
// 	r, _ := api.NextRune()
// 	AssertEqual(t, 'T', r, "first rune")
// }

// TODO FIXME
// func TestInputCanAcceptRunesFromReader(t *testing.T) {
// 	i := makeTokenizeAPI()
// 	i.NextRune()
// 	i.Accept()
// 	i.NextRune()
// 	i.Accept()
// 	i.NextRune()
// 	i.Accept()
// 	AssertEqual(t, "Tes", i.String(), "i.String()")
// }

// TODO FIXME
// func TestCallingNextRuneTwice_Panics(t *testing.T) {
// 	AssertPanic(t, PanicT{
// 		Function: func() {
// 			i := makeTokenizeAPI()
// 			i.NextRune()
// 			i.NextRune()
// 		},
// 		Regexp: true,
// 		Expect: `tokenize\.API\.NextRune\(\): NextRune\(\) called at /.*_test\.go:\d+ ` +
// 			`without a prior call to Accept\(\)`,
// 	})
// }

// TODO FIXME
// func TestCallingAcceptWithoutCallingNextRune_Panics(t *testing.T) {
// 	api := makeTokenizeAPI()
// 	AssertPanic(t, PanicT{
// 		Function: api.Accept,
// 		Regexp: true,
// 		Expect: `tokenize\.API\.Accept\(\): Accept\(\) called at /.*test\.go:\d+ ` +
// 			`without first calling NextRune\(\)`,
// 	})
// }

// TODO FIXME
// func TestCallingAcceptAfterReadError_Panics(t *testing.T) {
// 	api := tokenize.NewAPI("")
// 	AssertPanic(t, PanicT{
// 		Function: func() {
// 			api.NextRune()
// 			api.Accept()
// 		},
// 		Regexp: true,
// 		Expect: `tokenize\.API\.Accept\(\): Accept\(\) called at /.*_test\.go:\d+` +
// 			`, but the prior call to NextRune\(\) failed`,
// 	})
// }

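// Taken together, the disabled tests above pin down the intended
// NextRune()/Accept() contract: a successful NextRune() must be followed
// by Accept() before NextRune() may be called again, and Accept() is only
// valid directly after a successful NextRune().
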
func TestCallingMergeOnTopLevelAPI_Panics(t *testing.T) {
	AssertPanic(t, PanicT{
		Function: func() {
			i := makeTokenizeAPI()
			i.Merge(0)
		},
		Regexp: true,
		Expect: `tokenize\.API\.Merge\(\): Merge\(\) called at /.*_test\.go:\d+ on the top-level API`})
}

func TestCallingMergeOnForkParentAPI_Panics(t *testing.T) {
	AssertPanic(t, PanicT{
		Function: func() {
			i := makeTokenizeAPI()
			child := i.Fork()
			i.Fork()
			i.Merge(child)
		},
		Regexp: true,
		Expect: `tokenize\.API\.Merge\(\): Merge\(\) called at /.*_test\.go:\d+ ` +
			`on API stack level 1, but the current stack level is 2 \(forgot to Dispose\(\) a forked child\?\)`})
}

func TestCallingDisposeOnTopLevelAPI_Panics(t *testing.T) {
	AssertPanic(t, PanicT{
		Function: func() {
			i := makeTokenizeAPI()
			i.Dispose(0)
		},
		Regexp: true,
		Expect: `tokenize\.API\.Dispose\(\): Dispose\(\) called at /.*_test\.go:\d+ on the top-level API`})
}

func TestCallingDisposeOnForkParentAPI_Panics(t *testing.T) {
	AssertPanic(t, PanicT{
		Function: func() {
			i := makeTokenizeAPI()
			child := i.Fork()
			i.Fork()
			i.Dispose(child)
		},
		Regexp: true,
		Expect: `tokenize\.API\.Dispose\(\): Dispose\(\) called at /.*_test\.go:\d+ ` +
			`on API stack level 1, but the current stack level is 2 \(forgot to Dispose\(\) a forked child\?\)`})
}

func TestCallingForkOnForkedParentAPI_Panics(t *testing.T) {
	AssertPanic(t, PanicT{
		Function: func() {
			i := makeTokenizeAPI()
			i.Fork()
			g := i.Fork()
			i.Fork()
			i.Merge(g)
		},
		Regexp: true,
		Expect: `tokenize\.API\.Merge\(\): Merge\(\) called at /.*_test\.go:\d+ ` +
			`on API stack level 2, but the current stack level is 3 \(forgot to Dispose\(\) a forked child\?\)`})
}

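// For contrast with the panic cases above, a sketch of the intended
// Fork/Merge/Dispose flow (illustrative only, inferred from the stack
// semantics that the panic messages describe):
//
//	i := makeTokenizeAPI()
//	child := i.Fork() // push a new stack level to work on
//	// ... let a handler consume input at the child level ...
//	i.Merge(child)   // on success, keep the child's progress
//	i.Dispose(child) // always pop the child level again
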
// TODO FIXME
// func TestForkingInput_ClearsLastRune(t *testing.T) {
// 	AssertPanic(t, PanicT{
// 		Function: func() {
// 			i := makeTokenizeAPI()
// 			i.NextRune()
// 			i.Fork()
// 			i.Accept()
// 		},
// 		Regexp: true,
// 		Expect: `tokenize\.API\.Accept\(\): Accept\(\) called at /.*_test\.go:\d+ without first calling NextRune\(\)`,
// 	})
// }

// TODO FIXME
// func TestAccept_UpdatesCursor(t *testing.T) {
// 	i := tokenize.NewAPI(strings.NewReader("input\r\nwith\r\nnewlines"))
// 	AssertEqual(t, "start of file", i.Cursor(), "cursor 1")
// 	for j := 0; j < 6; j++ { // read "input\r", cursor ends up at "\n"
// 		i.NextRune()
// 		i.Accept()
// 	}
// 	AssertEqual(t, "line 1, column 7", i.Cursor(), "cursor 2")
// 	i.NextRune() // read "\n", cursor ends up at the start of a new line
// 	i.Accept()
// 	AssertEqual(t, "line 2, column 1", i.Cursor(), "cursor 3")
// 	for j := 0; j < 10; j++ { // read "with\r\nnewl", cursor ends up at "i"
// 		i.NextRune()
// 		i.Accept()
// 	}
// 	AssertEqual(t, "line 3, column 5", i.Cursor(), "cursor 4")
// }

// TODO FIXME
// func TestWhenCallingNextRuneAtEndOfFile_EOFIsReturned(t *testing.T) {
// 	i := tokenize.NewAPI(strings.NewReader("X"))
// 	i.NextRune()
// 	i.Accept()
// 	r, err := i.NextRune()
// 	AssertEqual(t, true, r == utf8.RuneError, "returned rune from NextRune()")
// 	AssertEqual(t, true, err == io.EOF, "returned error from NextRune()")
// }

// TODO FIXME
// func TestAfterReadingRuneAtEndOfFile_EarlierRunesCanStillBeAccessed(t *testing.T) {
// 	i := tokenize.NewAPI(strings.NewReader("X"))
// 	child := i.Fork()
// 	i.NextRune()
// 	i.Accept()
// 	r, err := i.NextRune()
// 	AssertEqual(t, true, r == utf8.RuneError, "returned rune from 2nd NextRune()")
// 	i.Dispose(child) // brings the read offset back to the start
// 	r, err = i.NextRune() // so here we should see the first rune again
// 	AssertEqual(t, 'X', r, "returned rune from 3rd NextRune()")
// 	AssertEqual(t, true, err == nil, "returned error from 3rd NextRune()")
// }

func makeTokenizeAPI() *tokenize.API {
	return tokenize.NewAPI("Testing")
}