diff --git a/examples/example_dutchpostcode_test.go b/examples/example_dutchpostcode_test.go
index 560d750..b40a3b3 100644
--- a/examples/example_dutchpostcode_test.go
+++ b/examples/example_dutchpostcode_test.go
@@ -29,7 +29,7 @@ func Example_dutchPostcodeUsingTokenizer() {
 			fmt.Printf("[%d] Input: %q Error: %s\n", i, input, err)
 		} else {
 			fmt.Printf("[%d] Input: %q Output: %s Tokens:", i, input, result)
-			for _, t := range result.Tokens() {
+			for _, t := range result.Tokens {
 				fmt.Printf(" %s(%s)", t.Type, t.Value)
 			}
 			fmt.Printf("\n")
diff --git a/tokenize/assertions_test.go b/tokenize/assertions_test.go
index 6b11beb..ec758f7 100644
--- a/tokenize/assertions_test.go
+++ b/tokenize/assertions_test.go
@@ -102,11 +102,11 @@ func AssertTokenMaker(t *testing.T, test TokenMakerT) {
 	if err != nil {
 		t.Errorf("Test %q failed with error: %s", test.Input, err)
 	} else {
-		if len(result.Tokens()) != len(test.Expected) {
-			t.Errorf("Unexpected number of tokens in output:\nexpected: %d\nactual: %d", len(test.Expected), len(result.Tokens()))
+		if len(result.Tokens) != len(test.Expected) {
+			t.Errorf("Unexpected number of tokens in output:\nexpected: %d\nactual: %d", len(test.Expected), len(result.Tokens))
 		}
 		for i, expected := range test.Expected {
-			actual := result.Token(i)
+			actual := result.Tokens[i]
 			if expected.Type != actual.Type {
 				t.Errorf("Unexpected Type in result.Tokens[%d]:\nexpected: (%T) %s\nactual: (%T) %s", i, expected.Type, expected.Type, actual.Type, actual.Type)
 			}
diff --git a/tokenize/handler_test.go b/tokenize/handler_test.go
index 6f929bc..c7058c2 100644
--- a/tokenize/handler_test.go
+++ b/tokenize/handler_test.go
@@ -62,7 +62,7 @@ func ExampleHandler_SeparatedBy() {
 	csv := t.Int("number", a.Digits).SeparatedBy(a.Comma)
 
 	r, _ := csv.Match("123,456,7,8,9")
-	for i, token := range r.Tokens() {
+	for i, token := range r.Tokens {
 		fmt.Printf("[%d] %v\n", i, token)
 	}
 	// Output:
diff --git a/tokenize/handlers_builtin_test.go b/tokenize/handlers_builtin_test.go
index af121b9..9c90ab0 100644
--- a/tokenize/handlers_builtin_test.go
+++ b/tokenize/handlers_builtin_test.go
@@ -460,7 +460,7 @@ func TestTokenGroup_Match(t *testing.T) {
 	api, err := tokenizer("xxxxx")
 	AssertTrue(t, err == nil, "Tokenizer result")
 
-	tokens := api.Tokens()
+	tokens := api.Tokens
 	AssertEqual(t, 1, len(tokens), "Length of tokens slice")
 	contained := tokens[0].Value.([]tokenize.Token)
 	AssertEqual(t, 3, len(contained), "Length of contained tokens")
@@ -476,7 +476,7 @@ func TestTokenGroup_Mismatch(t *testing.T) {
 	api, err := tokenizer("12345")
 	AssertTrue(t, err == nil, "Tokenizer result")
 
-	tokens := api.Tokens()
+	tokens := api.Tokens
 	AssertEqual(t, 0, len(tokens), "Length of tokens slice")
 }
 
diff --git a/tokenize/tokenizer_test.go b/tokenize/tokenizer_test.go
index bbe6f0b..0786fc4 100644
--- a/tokenize/tokenizer_test.go
+++ b/tokenize/tokenizer_test.go
@@ -40,7 +40,7 @@ func ExampleNew() {
 	result, err := tokenizer(input)
 
 	if err == nil {
-		fmt.Printf("Result: %s\n", result.Tokens())
+		fmt.Printf("Result: %s\n", result.Tokens)
 	} else {
 		fmt.Printf("Error: %s\n", err)
 	}
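
Migration note: every call site in this patch changes the same way, because the result's Tokens() accessor becomes an exported Tokens field, and indexed access moves from result.Token(i) to plain result.Tokens[i]. A minimal sketch of the new call pattern, reusing the comma-separated-integer handler from ExampleHandler_SeparatedBy above; the import path and the tokenize.A/tokenize.T shorthands are assumptions inferred from the repository layout and tests, not confirmed by this patch:

    package main

    import (
        "fmt"

        // Assumed import path for this repository's tokenize package.
        "git.makaay.nl/mauricem/go-parsekit/tokenize"
    )

    func main() {
        // Shorthands as used in the tests; assumed to be the package's
        // exported atom (A) and token-maker (T) handler sets.
        a, t := tokenize.A, tokenize.T

        // Same handler as in ExampleHandler_SeparatedBy: integer tokens
        // separated by commas.
        csv := t.Int("number", a.Digits).SeparatedBy(a.Comma)

        r, _ := csv.Match("1,22,333")
        for i, token := range r.Tokens { // field access; no longer r.Tokens()
            fmt.Printf("[%d] %v\n", i, token)
        }
    }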