Working on documentation.

This commit is contained in:
Maurice Makaay 2019-06-12 15:24:09 +00:00
parent 27c97ae902
commit cef6ae1bc4
6 changed files with 40 additions and 17 deletions

View File

@ -53,7 +53,7 @@ func Example_dutchPostcodeUsingTokenizer() {
func createPostcodeTokenizer() tokenize.Func {
// Easy access to the tokenize definitions.
C, A, M, T := tokenize.C, tokenize.A, tokenize.M, tokenize.T
a, m, t := tokenize.A, tokenize.M, tokenize.T
// Handler functions are created and combined to satisfy these rules:
// • A Dutch postcode consists of 4 digits and 2 letters (1234XX).
@ -61,11 +61,13 @@ func createPostcodeTokenizer() tokenize.Func {
// • A space between letters and digits is optional.
// • It is good form to write the letters in upper case.
// • It is good form to use a single space between digits and letters.
pcDigits := A.DigitNotZero.Then(A.Digit.Times(3))
pcLetter := A.ASCIILower.Or(A.ASCIIUpper)
pcLetters := M.ToUpper(pcLetter.Times(2))
space := M.Replace(A.Blanks.Optional(), " ")
postcode := C.Seq(T.Str("PCD", pcDigits), space, T.Str("PCL", pcLetters), A.EndOfFile)
pcDigits := a.DigitNotZero.Then(a.Digit.Times(3))
tokDigits := t.Str("PCD", pcDigits)
pcLetter := a.ASCIILower.Or(a.ASCIIUpper)
pcLetters := m.ToUpper(pcLetter.Times(2))
tokLetters := t.Str("PCL", pcLetters)
space := m.Replace(a.Blanks.Optional(), " ")
postcode := tokDigits.Then(space).Then(tokLetters).Then(a.EndOfFile)
// Create a Tokenizer that wraps the 'postcode' Handler and allows
// us to match some input against that handler.

View File

@ -23,7 +23,7 @@ type Func func(interface{}) error
// parsing process.
//
// This function returns a function that can be invoked to run the parser
// on the provided input data. For an overview of allowed inputs, take a
// against the provided input data. For an overview of allowed inputs, take a
// look at the documentation for parsekit.read.New().
func New(startHandler Handler) Func {
if startHandler == nil {

View File

@ -102,11 +102,11 @@ func ExampleAPI_Accept_inSwitchStatement() {
}
func ExampleAPI_Stop() {
C, A := tokenize.C, tokenize.A
c, a := tokenize.C, tokenize.A
parser := parse.New(func(p *parse.API) {
fmt.Printf("First word: ")
for p.Accept(C.Not(A.Space)) {
for p.Accept(c.Not(a.Space)) {
fmt.Printf("%s", p.Result())
}
p.Stop()
@ -118,11 +118,11 @@ func ExampleAPI_Stop() {
}
func ExampleAPI_Stop_notCalledAndNoInputPending() {
C, A := tokenize.C, tokenize.A
c, a := tokenize.C, tokenize.A
parser := parse.New(func(p *parse.API) {
fmt.Printf("Word: ")
for p.Accept(C.Not(A.Space)) {
for p.Accept(c.Not(a.Space)) {
fmt.Printf("%s", p.Result())
}
fmt.Printf("\n")
@ -136,11 +136,11 @@ func ExampleAPI_Stop_notCalledAndNoInputPending() {
}
func ExampleAPI_Stop_notCalledButInputPending() {
C, A := tokenize.C, tokenize.A
c, a := tokenize.C, tokenize.A
parser := parse.New(func(p *parse.API) {
fmt.Printf("First word: ")
for p.Accept(C.Not(A.Space)) {
for p.Accept(c.Not(a.Space)) {
fmt.Printf("%s", p.Result())
}
fmt.Printf("\n")
@ -155,8 +155,8 @@ func ExampleAPI_Stop_notCalledButInputPending() {
func ExampleAPI_Peek() {
// Definition of a fantasy serial number format.
C, A := tokenize.C, tokenize.A
serialnr := C.Seq(A.Asterisk, A.ASCIIUpper, A.ASCIIUpper, A.Digits)
c, a := tokenize.C, tokenize.A
serialnr := c.Seq(a.Asterisk, a.ASCIIUpper, a.ASCIIUpper, a.Digits)
// This handler is able to handle serial numbers.
serialnrHandler := func(p *parse.API) {

View File

@ -1,4 +1,4 @@
// Package read provides a buffered Reader that wraps around an io.Reader.
// Package read provides a buffered input reader that is used to feed data to the tokenizer.
//
// Functionally, it provides an input buffer in the form of a sliding window.
// Let's say we've got the following input coming up in the io.Reader that is

View File

@ -1,11 +1,32 @@
// Package tokenize provides tooling to build a tokenizer in a parser/combinator style
// that is used to feed data to the parser.
package tokenize
import (
"fmt"
)
// Func is the function signature as returned by New: a function that takes
// any supported type of input, executes a tokenizer run and returns a
// Result struct (possibly nil) and an error (possibly nil).
type Func func(input interface{}) (*Result, error)
// New instantiates a new tokenizer.
//
// The tokenizer is a tokenizing state machine, in which tokenize.Handler
// functions are used to move the state machine forward during tokenizing.
// Using the New function, you can wrap a tokenize.Handler in a simple way,
// making it possible to feed some input to the handler and retrieve the
// tokenizing results.
//
// The startHandler argument points the tokenizer to the tokenize.Handler function
// that must be executed at the start of the tokenizing process. From there on
// other tokenize.Handler functions can be invoked recursively to implement the
// tokenizing process.
//
// This function returns a function that can be invoked to run the tokenizer
// against the provided input data. For an overview of allowed inputs, take a
// look at the documentation for parsekit.read.New().
func New(tokenHandler Handler) Func {
return func(input interface{}) (*Result, error) {
api := NewAPI(input)

View File

@ -21,7 +21,7 @@ import (
// optional matching using tokenize.C.Any(...) for example. If matching
// for Any(IPv4, Digits), the example case should simply end up with 10
// after the IPv4 mismatch.
func ExampleTokenizer_Execute() {
func ExampleNew() {
// Build the tokenizer for ip/mask.
var c, a, t = tokenize.C, tokenize.A, tokenize.T
ip := t.Str("ip", a.IPv4)